var/home/core/zuul-output/logs/kubelet.log
Sep 30 20:13:33 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 30 20:13:33 crc restorecon[4667]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 30 20:13:33 crc restorecon[4667]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc 
restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 30 20:13:33 crc restorecon[4667]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 20:13:33 crc restorecon[4667]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 20:13:33 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 30 20:13:34 crc 
restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 
20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 30 20:13:34 crc 
restorecon[4667]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]:
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 
20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc 
restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 30 20:13:34 crc restorecon[4667]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 30 20:13:35 crc kubenswrapper[4919]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 20:13:35 crc kubenswrapper[4919]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 30 20:13:35 crc kubenswrapper[4919]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 20:13:35 crc kubenswrapper[4919]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 30 20:13:35 crc kubenswrapper[4919]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Sep 30 20:13:35 crc kubenswrapper[4919]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.358524 4919 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368086 4919 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368124 4919 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368135 4919 feature_gate.go:330] unrecognized feature gate: OVNObservability Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368146 4919 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368156 4919 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368164 4919 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368172 4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368180 4919 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368187 4919 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368195 4919 feature_gate.go:330] unrecognized feature gate: PlatformOperators Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368203 4919 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368235 4919 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368246 4919 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368258    4919 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368267    4919 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368276    4919 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368287    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368297    4919 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368309    4919 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368320    4919 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368331    4919 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368340    4919 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368348    4919 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368356    4919 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368364    4919 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368371    4919 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368381    4919 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368388    4919 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368396    4919 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368417    4919 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368425    4919 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368432    4919 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368440    4919 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368450    4919 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368460    4919 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368468    4919 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368477    4919 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368485    4919 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368493    4919 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368502    4919 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368511    4919 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368519    4919 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368527    4919 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368534    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368545    4919 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368554    4919 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368563    4919 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368571    4919 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368578    4919 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368586    4919 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368594    4919 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368602    4919 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368609    4919 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368663    4919 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368675    4919 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368684    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368692    4919 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368701    4919 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368709    4919 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368718    4919 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368726    4919 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368735    4919 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368743    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368751    4919 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368759    4919 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368768    4919 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368776    4919 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368785    4919 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368792    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368800    4919 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.368808    4919 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.368962    4919 flags.go:64] FLAG: --address="0.0.0.0"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.368985    4919 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369008    4919 flags.go:64] FLAG: --anonymous-auth="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369023    4919 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369038    4919 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369050    4919 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369065    4919 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369081    4919 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369093    4919 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369104    4919 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369116    4919 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369128    4919 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369140    4919 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369152    4919 flags.go:64] FLAG: --cgroup-root=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369163    4919 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369176    4919 flags.go:64] FLAG: --client-ca-file=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369187    4919 flags.go:64] FLAG: --cloud-config=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369198    4919 flags.go:64] FLAG: --cloud-provider=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369208    4919 flags.go:64] FLAG: --cluster-dns="[]"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369292    4919 flags.go:64] FLAG: --cluster-domain=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369306    4919 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369318    4919 flags.go:64] FLAG: --config-dir=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369329    4919 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369342    4919 flags.go:64] FLAG: --container-log-max-files="5"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369358    4919 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369369    4919 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369381    4919 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369393    4919 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369405    4919 flags.go:64] FLAG: --contention-profiling="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369417    4919 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369428    4919 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369440    4919 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369451    4919 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369466    4919 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369479    4919 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369490    4919 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369501    4919 flags.go:64] FLAG: --enable-load-reader="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369513    4919 flags.go:64] FLAG: --enable-server="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369524    4919 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369541    4919 flags.go:64] FLAG: --event-burst="100"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369552    4919 flags.go:64] FLAG: --event-qps="50"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369564    4919 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369576    4919 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369591    4919 flags.go:64] FLAG: --eviction-hard=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369605    4919 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369617    4919 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369629    4919 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369641    4919 flags.go:64] FLAG: --eviction-soft=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369652    4919 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369664    4919 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369676    4919 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369687    4919 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369698    4919 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369709    4919 flags.go:64] FLAG: --fail-swap-on="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369721    4919 flags.go:64] FLAG: --feature-gates=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369734    4919 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369746    4919 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369758    4919 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369770    4919 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369782    4919 flags.go:64] FLAG: --healthz-port="10248"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369794    4919 flags.go:64] FLAG: --help="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369805    4919 flags.go:64] FLAG: --hostname-override=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369816    4919 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369828    4919 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369840    4919 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369851    4919 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369863    4919 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369874    4919 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369886    4919 flags.go:64] FLAG: --image-service-endpoint=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369898    4919 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369908    4919 flags.go:64] FLAG: --kube-api-burst="100"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369920    4919 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369932    4919 flags.go:64] FLAG: --kube-api-qps="50"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369944    4919 flags.go:64] FLAG: --kube-reserved=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369955    4919 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369966    4919 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369978    4919 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.369989    4919 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370001    4919 flags.go:64] FLAG: --lock-file=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370016    4919 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370027    4919 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370039    4919 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370057    4919 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370068    4919 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370080    4919 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370091    4919 flags.go:64] FLAG: --logging-format="text"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370102    4919 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370115    4919 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370126    4919 flags.go:64] FLAG: --manifest-url=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370139    4919 flags.go:64] FLAG: --manifest-url-header=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370156    4919 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370168    4919 flags.go:64] FLAG: --max-open-files="1000000"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370182    4919 flags.go:64] FLAG: --max-pods="110"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370194    4919 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370206    4919 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370258    4919 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370271    4919 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370284    4919 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370297    4919 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370309    4919 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370337    4919 flags.go:64] FLAG: --node-status-max-images="50"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370349    4919 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370360    4919 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370371    4919 flags.go:64] FLAG: --pod-cidr=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370382    4919 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370401    4919 flags.go:64] FLAG: --pod-manifest-path=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370412    4919 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370424    4919 flags.go:64] FLAG: --pods-per-core="0"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370435    4919 flags.go:64] FLAG: --port="10250"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370447    4919 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370458    4919 flags.go:64] FLAG: --provider-id=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370469    4919 flags.go:64] FLAG: --qos-reserved=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370481    4919 flags.go:64] FLAG: --read-only-port="10255"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370492    4919 flags.go:64] FLAG: --register-node="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370503    4919 flags.go:64] FLAG: --register-schedulable="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370517    4919 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370537    4919 flags.go:64] FLAG: --registry-burst="10"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370548    4919 flags.go:64] FLAG: --registry-qps="5"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370559    4919 flags.go:64] FLAG: --reserved-cpus=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370571    4919 flags.go:64] FLAG: --reserved-memory=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370586    4919 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370597    4919 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370608    4919 flags.go:64] FLAG: --rotate-certificates="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370620    4919 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370631    4919 flags.go:64] FLAG: --runonce="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370643    4919 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370655    4919 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370667    4919 flags.go:64] FLAG: --seccomp-default="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370679    4919 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370691    4919 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370703    4919 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370716    4919 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370729    4919 flags.go:64] FLAG: --storage-driver-password="root"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370738    4919 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370747    4919 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370756    4919 flags.go:64] FLAG: --storage-driver-user="root"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370765    4919 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370775    4919 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370784    4919 flags.go:64] FLAG: --system-cgroups=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370793    4919 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370808    4919 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370817    4919 flags.go:64] FLAG: --tls-cert-file=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370826    4919 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370837    4919 flags.go:64] FLAG: --tls-min-version=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370846    4919 flags.go:64] FLAG: --tls-private-key-file=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370855    4919 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370864    4919 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370873    4919 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370882    4919 flags.go:64] FLAG: --v="2"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370894    4919 flags.go:64] FLAG: --version="false"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370906    4919 flags.go:64] FLAG: --vmodule=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370919    4919 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.370929    4919 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371148    4919 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371160    4919 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371169    4919 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371179    4919 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371187    4919 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371195    4919 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371203    4919 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371242    4919 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371250    4919 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371258    4919 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371271    4919 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371281    4919 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371291    4919 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371301    4919 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371311    4919 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371320    4919 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371331    4919 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371340    4919 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371350    4919 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371361    4919 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371370    4919 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371380    4919 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371389    4919 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371396    4919 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371406    4919 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371416    4919 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371428    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371436    4919 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371445    4919 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371454    4919 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371462    4919 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371470    4919 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371478    4919 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371486    4919 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371495    4919 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371504    4919 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371512    4919 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371520    4919 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371528    4919 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371535    4919 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371544    4919 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371551    4919 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371562    4919 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371573    4919 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371583    4919 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371592    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371603    4919 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371612    4919 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371621    4919 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371629    4919 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371637    4919 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371644    4919 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371652    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371660    4919 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371667    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371676    4919 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371684    4919 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371692    4919 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371700    4919 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371707    4919 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371715    4919 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371723    4919 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371731    4919 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371738    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371746    4919 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371754    4919 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371764    4919 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371774    4919 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371782    4919 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371789    4919 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.371798    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.371825    4919 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.384135    4919 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.384179    4919 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384325    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384348    4919 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384358    4919 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384368    4919 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384377    4919 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384386    4919 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384394    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384403    4919 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384411    4919 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384419    4919 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384428    4919 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384435    4919 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384443    4919 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384450    4919 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384460    4919 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384469    4919 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384481    4919 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384495    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384504    4919 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384512    4919 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384521    4919 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384530    4919 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384538    4919 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384546    4919 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384553    4919 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384562    4919 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384570    4919 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384598    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384606    4919 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384614    4919 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384622    4919 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384632    4919 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384641    4919 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384650    4919 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384661    4919 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384670    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384680    4919 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384688    4919 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384696    4919 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384704    4919 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384713    4919 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384720    4919 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384731    4919 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384739    4919 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384747    4919 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384755    4919 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384763    4919 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384771    4919 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384778    4919 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384790    4919 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384798    4919 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384805    4919 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384814    4919 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384822    4919 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384829    4919 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384837    4919 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384844    4919 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384852    4919 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384863    4919 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384873    4919 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384881    4919 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384890    4919 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384900    4919 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384909    4919 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384917    4919 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384926    4919 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384935    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384943    4919 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384951    4919 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384960    4919 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.384969    4919 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.384983    4919 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385298    4919 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385320    4919 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385331    4919 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385342    4919 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385352    4919 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385362    4919 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385372    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385382    4919 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385394    4919 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385403    4919 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385411    4919 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385419    4919 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385427    4919 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385436    4919 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385444    4919 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385451    4919 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385459    4919 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385467    4919 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385475    4919 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385483    4919 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385490    4919 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385498    4919 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385506    4919 feature_gate.go:330] unrecognized feature gate: Example
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385513    4919 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385521    4919 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385529    4919 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385539    4919 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385550    4919 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385560    4919 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385568    4919 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385577    4919 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385585    4919 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385593    4919 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385601    4919 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385609    4919 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385617    4919 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385625    4919 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385633    4919 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385641    4919 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385648    4919 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385657    4919 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385665    4919 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385673    4919 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385680    4919 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385688    4919 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385696    4919 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385706    4919 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385714    4919 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385722    4919 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385730    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385738    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385745    4919 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385753    4919 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385761    4919 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385769    4919 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385776    4919 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385784    4919 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385792    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385800    4919 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385808    4919 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385815    4919 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385823    4919 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385830    4919 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385841    4919 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385851    4919 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385859    4919 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385867    4919 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385876    4919 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385884    4919 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385894    4919 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.385904 4919 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.385917 4919 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.386919 4919 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.392340 4919 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.392467 4919 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.393908 4919 server.go:997] "Starting client certificate rotation"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.393957 4919 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.394197 4919 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-05 07:10:10.963818919 +0000 UTC
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.394338 4919 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1570h56m35.569486402s for next certificate rotation
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.420946 4919 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.424996 4919 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.444822 4919 log.go:25] "Validated CRI v1 runtime API"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.486166 4919 log.go:25] "Validated CRI v1 image API"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.488774 4919 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.497911 4919 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-30-20-03-23-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.497960 4919 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.526581 4919 manager.go:217] Machine: {Timestamp:2025-09-30 20:13:35.521382034 +0000 UTC m=+0.637415231 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:624bf2d2-e12c-4fba-8731-56406029f22b BootID:cc7fbf32-6c9b-4b90-bd86-52c553e5254d Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:1c:4e:b8 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:1c:4e:b8 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:7e:3d:12 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:19:48:3e Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:3b:80:63 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:48:3b:bb Speed:-1 Mtu:1496} {Name:eth10 MacAddress:66:56:da:4f:a6:46 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:de:bb:c8:ae:bf:c2 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.526965 4919 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.527200 4919 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.531162 4919 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.531474 4919 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.531550 4919 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.531852 4919 topology_manager.go:138] "Creating topology manager with none policy"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.531871 4919 container_manager_linux.go:303] "Creating device plugin manager"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.532450 4919 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.533288 4919 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.533547 4919 state_mem.go:36] "Initialized new in-memory state store"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.533987 4919 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.537974 4919 kubelet.go:418] "Attempting to sync node with API server"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.538198 4919 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.538349 4919 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.538387 4919 kubelet.go:324] "Adding apiserver pod source"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.538411 4919 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.545622 4919 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.546994 4919 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.547449 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.547601 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused
Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.547912 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError"
Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.547829 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.550121 4919 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552108 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552148 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552171 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552185 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552206 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552242 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552255 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552276 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552292 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552305 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552322 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.552335 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.553303 4919 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.554022 4919 server.go:1280] "Started kubelet"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.558991 4919 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.559160 4919 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.559458 4919 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 30 20:13:35 crc systemd[1]: Started Kubernetes Kubelet.
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.559974 4919 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.561477 4919 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.561519 4919 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.561996 4919 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 20:19:21.175985833 +0000 UTC
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.562071 4919 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 2064h5m45.613919588s for next certificate rotation
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.562180 4919 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.562194 4919 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.562389 4919 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.562273 4919 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.563129 4919 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="200ms"
Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.564081 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.564630 4919 factory.go:55] Registering systemd factory
Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.564626 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.564664 4919 factory.go:221] Registration of the systemd container factory successfully
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.565078 4919 factory.go:153] Registering CRI-O factory
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.565132 4919 factory.go:221] Registration of the crio container factory successfully
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.565258 4919 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.565308 4919 factory.go:103] Registering Raw factory
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.565333 4919 manager.go:1196] Started watching for new ooms in manager
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.568714 4919 server.go:460] "Adding debug handlers to kubelet server"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.571151 4919 manager.go:319] Starting recovery of all containers
Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.568833 4919 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.75:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a2892d581a456 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 20:13:35.553979478 +0000 UTC m=+0.670012645,LastTimestamp:2025-09-30 20:13:35.553979478 +0000 UTC m=+0.670012645,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.579718 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.579804 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.579835 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.579865 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.579893 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.579920 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.579951 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.579975 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.580006 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.580033 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.580058 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582028 4919 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582082 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582107 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582155 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582176 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582295 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582324 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582344 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582364 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582384 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582402 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582451 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582490 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582510 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582528 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582555 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582580 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582599 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582620 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582688 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582707 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582726 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582743 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582762 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582782 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582801 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582822 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582864 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582883 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582901 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582920 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582964 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.582988 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583007 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583026 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583047 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583067 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583086 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583105 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583124 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583143 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583163 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583191 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583250 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583280 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583303 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583322 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583341 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583361 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583382 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583401 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583422 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583444 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583463 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583484 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583502 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583521 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583539 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583558 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583578 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583597 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583616 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583637 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583658 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583700 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583720 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583739 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583773 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583793 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583812 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583830 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583848 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583869 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583889 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583908 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.583956 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584020 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584040 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584058 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584078 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584097 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584118 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584151 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584194 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584269 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584298 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584324 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584349 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584386 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584425 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584458 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584514 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584541 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584590 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584753 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584864 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584912 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584933 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.584954 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585000 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585022 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585044 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585066 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585087 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585114 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585140 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585164 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585258 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585328 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585380 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585430 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585457 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.585483 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.586378 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.586449 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.586505 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587741 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587761 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587790 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587807 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587822 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587840 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587856 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587872 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587883 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587895 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587913 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3"
volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587926 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587947 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587960 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587973 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.587991 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588011 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588027 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588041 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588053 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588071 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588085 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588108 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588122 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588134 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588153 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588168 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588192 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588206 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588251 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588268 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588286 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588305 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588326 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588338 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588354 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588366 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588380 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588398 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588448 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588469 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588480 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588492 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588508 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" 
volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588522 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588539 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588553 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588569 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588592 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588604 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588619 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588631 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588642 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588662 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588676 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588695 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588706 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588718 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588735 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588748 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588758 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588773 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588783 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588800 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588815 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588829 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588847 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588857 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588872 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588882 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588894 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588909 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588921 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588936 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588946 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588958 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588974 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.588985 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.589002 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.589013 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.589027 4919 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.589036 4919 reconstruct.go:97] "Volume reconstruction finished" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.589043 4919 reconciler.go:26] "Reconciler: start to sync state" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.598709 4919 manager.go:324] Recovery completed Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.610392 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.612317 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.612391 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.612415 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.613457 4919 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.613483 4919 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.613508 4919 state_mem.go:36] "Initialized new in-memory state store" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.627604 4919 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.630758 4919 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.630849 4919 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.630900 4919 kubelet.go:2335] "Starting kubelet main sync loop" Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.630976 4919 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.631023 4919 policy_none.go:49] "None policy: Start" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.632293 4919 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.632362 4919 state_mem.go:35] "Initializing new in-memory state store" Sep 30 20:13:35 crc kubenswrapper[4919]: W0930 20:13:35.632348 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.632430 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.662880 4919 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.693572 4919 manager.go:334] "Starting Device Plugin manager" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.693636 4919 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.693655 4919 server.go:79] "Starting device plugin registration server" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.694154 4919 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.694193 4919 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.694911 4919 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.695115 4919 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.695131 4919 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.703845 4919 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.732123 4919 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Sep 30 20:13:35 crc kubenswrapper[4919]: 
I0930 20:13:35.732313 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.734067 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.734135 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.734158 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.734445 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.734777 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.734886 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736277 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736326 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736345 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736560 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736605 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736639 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736663 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736835 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.736898 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.738763 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.738803 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.738820 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.738968 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.739246 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.739313 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.739337 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.739393 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.739416 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.741754 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.741819 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.741767 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.741846 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.741875 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.741898 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.742074 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.742366 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.742447 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.743303 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.743344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.743359 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.743674 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.743756 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.745310 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.745371 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.745394 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.745776 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.747602 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.747633 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.764089 4919 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="400ms" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.791568 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.791712 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.791814 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.791905 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.791992 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 
20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792180 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792258 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792290 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792412 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792505 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792566 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792610 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792688 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792833 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.792887 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.796860 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.798927 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.798976 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.799001 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.799054 4919 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 20:13:35 crc kubenswrapper[4919]: E0930 20:13:35.799762 4919 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.894773 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.894946 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895043 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895126 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895280 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895036 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895596 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895475 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895720 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895920 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.896135 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.896038 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.895821 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.896264 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.896509 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.896606 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.896699 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.896904 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897127 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897372 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897579 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897662 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897741 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897290 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.896810 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897050 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897981 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.898002 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.898066 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 20:13:35 crc kubenswrapper[4919]: I0930 20:13:35.897475 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:35.999940 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.001199 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.001254 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.001268 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.001291 4919 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 20:13:36 crc kubenswrapper[4919]: E0930 20:13:36.001757 4919 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.065678 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.072853 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.097611 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.116522 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.123687 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.140062 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-8f8c8843b476372d09cc4aab73b2d5a66961297177b27530d1900092aa2ea08d WatchSource:0}: Error finding container 8f8c8843b476372d09cc4aab73b2d5a66961297177b27530d1900092aa2ea08d: Status 404 returned error can't find the container with id 8f8c8843b476372d09cc4aab73b2d5a66961297177b27530d1900092aa2ea08d Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.141627 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-a8aa4795f1e7ecf3302d08f6ace8a4e019e6cc12ea543fce3d59be3131a59213 WatchSource:0}: Error finding container a8aa4795f1e7ecf3302d08f6ace8a4e019e6cc12ea543fce3d59be3131a59213: Status 404 returned error can't find the container with id a8aa4795f1e7ecf3302d08f6ace8a4e019e6cc12ea543fce3d59be3131a59213 Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.149436 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-dbb49c354f95729ff41404b5bb9834dfab22746cd48b411f72e51e508e96808d WatchSource:0}: Error finding container dbb49c354f95729ff41404b5bb9834dfab22746cd48b411f72e51e508e96808d: Status 404 returned error can't find the container with id dbb49c354f95729ff41404b5bb9834dfab22746cd48b411f72e51e508e96808d Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.157247 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-2c33d6c4a8a4416e6a9a5a91e286ee7229bbf08b8575eed7e9d06f0afbf7a6ce WatchSource:0}: Error finding container 2c33d6c4a8a4416e6a9a5a91e286ee7229bbf08b8575eed7e9d06f0afbf7a6ce: Status 404 returned error can't find the container with id 2c33d6c4a8a4416e6a9a5a91e286ee7229bbf08b8575eed7e9d06f0afbf7a6ce Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.159409 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-8301dfdbe6cabea1ae5ab660050a69f022cee5cd7d8fd996f7a5df4ee39e1bf5 WatchSource:0}: Error finding container 8301dfdbe6cabea1ae5ab660050a69f022cee5cd7d8fd996f7a5df4ee39e1bf5: Status 404 returned error can't find the container with id 8301dfdbe6cabea1ae5ab660050a69f022cee5cd7d8fd996f7a5df4ee39e1bf5 Sep 30 20:13:36 crc kubenswrapper[4919]: E0930 20:13:36.165264 4919 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="800ms" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.402313 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.404248 4919 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.404308 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.404333 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.404395 4919 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 20:13:36 crc kubenswrapper[4919]: E0930 20:13:36.404887 4919 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.560618 4919 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.636127 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"dbb49c354f95729ff41404b5bb9834dfab22746cd48b411f72e51e508e96808d"} Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.637270 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8f8c8843b476372d09cc4aab73b2d5a66961297177b27530d1900092aa2ea08d"} Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.638435 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a8aa4795f1e7ecf3302d08f6ace8a4e019e6cc12ea543fce3d59be3131a59213"} Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.639730 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"8301dfdbe6cabea1ae5ab660050a69f022cee5cd7d8fd996f7a5df4ee39e1bf5"} Sep 30 20:13:36 crc kubenswrapper[4919]: I0930 20:13:36.641021 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2c33d6c4a8a4416e6a9a5a91e286ee7229bbf08b8575eed7e9d06f0afbf7a6ce"} Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.657883 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:36 crc kubenswrapper[4919]: E0930 20:13:36.657990 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.784816 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to 
list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:36 crc kubenswrapper[4919]: E0930 20:13:36.785203 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.948438 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:36 crc kubenswrapper[4919]: E0930 20:13:36.948522 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Sep 30 20:13:36 crc kubenswrapper[4919]: E0930 20:13:36.966482 4919 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="1.6s" Sep 30 20:13:36 crc kubenswrapper[4919]: W0930 20:13:36.985394 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:36 crc kubenswrapper[4919]: E0930 20:13:36.985454 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.205591 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.207783 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.207832 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.207845 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.207876 4919 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 20:13:37 crc kubenswrapper[4919]: E0930 20:13:37.208432 4919 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.560700 4919 
csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.647628 4919 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec" exitCode=0 Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.647712 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec"} Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.647761 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.648987 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.649037 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.649051 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.651666 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3"} Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.651736 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.651756 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc"} Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.651781 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004"} Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.651798 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533"} Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.652932 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.652967 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.652980 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 
20:13:37.654273 4919 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774" exitCode=0 Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.654358 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774"} Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.654396 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.656101 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.656136 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.656154 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.656762 4919 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33" exitCode=0 Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.656829 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33"} Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.656935 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.659032 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.659506 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.659529 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.659540 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.660035 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.660086 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.660179 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.660280 4919 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c" exitCode=0 Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.660319 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c"} Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.660650 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.663078 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.663146 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:37 crc kubenswrapper[4919]: I0930 20:13:37.663161 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.124017 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:38 crc kubenswrapper[4919]: W0930 20:13:38.306976 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:38 crc kubenswrapper[4919]: E0930 20:13:38.307057 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.560164 4919 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:38 crc kubenswrapper[4919]: E0930 20:13:38.567013 4919 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.75:6443: connect: connection refused" interval="3.2s" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.672206 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f2e3a2e1c1876f84e897ccb3b0cca07a8802fb2afc2de8768c8033ec904b900a"} Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.672322 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.673689 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.673731 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.673747 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.675299 4919 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" 
containerID="3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed" exitCode=0 Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.675363 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed"} Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.675403 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.676178 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.676236 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.676253 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.679640 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7"} Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.679695 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4"} Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.679709 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e"} Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.679665 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.688575 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.688618 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.688635 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.696965 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.697009 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d"} Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.697049 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7"} Sep 30 20:13:38 crc 
kubenswrapper[4919]: I0930 20:13:38.697065 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c"} Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.697078 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2"} Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.698668 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.698711 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.698723 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.808703 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.809848 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.809879 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.809889 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:38 crc kubenswrapper[4919]: I0930 20:13:38.809911 4919 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 20:13:38 crc kubenswrapper[4919]: E0930 20:13:38.810840 4919 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.75:6443: connect: connection refused" node="crc" Sep 30 20:13:38 crc kubenswrapper[4919]: W0930 20:13:38.813853 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:38 crc kubenswrapper[4919]: E0930 20:13:38.813918 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Sep 30 20:13:39 crc kubenswrapper[4919]: W0930 20:13:39.113022 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.75:6443: connect: connection refused Sep 30 20:13:39 crc kubenswrapper[4919]: E0930 20:13:39.113136 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.75:6443: connect: connection refused" logger="UnhandledError" Sep 30 20:13:39 crc kubenswrapper[4919]: E0930 20:13:39.246358 4919 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.75:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a2892d581a456 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-30 20:13:35.553979478 +0000 UTC m=+0.670012645,LastTimestamp:2025-09-30 20:13:35.553979478 +0000 UTC m=+0.670012645,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.703175 4919 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1" exitCode=0 Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.703258 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1"} Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.703348 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.704277 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.704297 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.704305 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.707837 4919 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.707847 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4"} Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.707865 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.707968 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.708022 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.707967 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.708694 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.708727 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.708739 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.709301 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.709326 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.709343 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.709828 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.709992 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.710054 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.709855 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.710244 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:39 crc kubenswrapper[4919]: I0930 20:13:39.710272 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.720176 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52"} Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.720569 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8"} Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.720595 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4"} Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.720611 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1"} Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.720309 4919 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.720681 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.721680 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.721732 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:40 crc kubenswrapper[4919]: I0930 20:13:40.721749 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.124294 4919 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.124438 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.707750 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.728564 4919 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.728608 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.728623 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279"} Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.728609 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.729684 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.729684 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.729723 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.729737 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.729740 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:41 crc kubenswrapper[4919]: I0930 20:13:41.729755 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.011586 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.013180 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:42 crc 
kubenswrapper[4919]: I0930 20:13:42.013266 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.013286 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.013319 4919 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.257187 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.257632 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.259855 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.259926 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.259945 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.731881 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.733456 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.733520 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:42 crc kubenswrapper[4919]: I0930 20:13:42.733533 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:43 crc kubenswrapper[4919]: I0930 20:13:43.100495 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:43 crc kubenswrapper[4919]: I0930 20:13:43.100725 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:43 crc kubenswrapper[4919]: I0930 20:13:43.102131 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:43 crc kubenswrapper[4919]: I0930 20:13:43.102166 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:43 crc kubenswrapper[4919]: I0930 20:13:43.102178 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.576395 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.576565 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.577959 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.578054 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.578089 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.658590 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.658902 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.660389 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.660432 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.660441 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.756564 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.756823 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.758140 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.758205 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:44 crc kubenswrapper[4919]: I0930 20:13:44.758252 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:45 crc kubenswrapper[4919]: E0930 20:13:45.704019 4919 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 30 20:13:45 crc kubenswrapper[4919]: I0930 20:13:45.800238 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:45 crc kubenswrapper[4919]: I0930 20:13:45.800524 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:45 crc kubenswrapper[4919]: I0930 20:13:45.802477 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:45 crc kubenswrapper[4919]: I0930 20:13:45.802544 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:45 crc kubenswrapper[4919]: I0930 20:13:45.802556 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:45 crc kubenswrapper[4919]: I0930 20:13:45.807617 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.251352 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.251625 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.253318 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.253391 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.253415 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.744565 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.746372 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.746783 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.746939 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:46 crc kubenswrapper[4919]: I0930 20:13:46.751509 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:47 crc kubenswrapper[4919]: I0930 20:13:47.746556 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:47 crc kubenswrapper[4919]: I0930 20:13:47.747670 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:47 crc kubenswrapper[4919]: I0930 20:13:47.747713 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:47 crc kubenswrapper[4919]: I0930 20:13:47.747726 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.561441 4919 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.698131 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.698677 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.700056 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.700112 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.700135 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.753854 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.755788 4919 
generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4" exitCode=255 Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.755842 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4"} Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.755990 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.756995 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.757022 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.757034 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.757505 4919 scope.go:117] "RemoveContainer" containerID="f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4" Sep 30 20:13:49 crc kubenswrapper[4919]: W0930 20:13:49.947745 4919 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Sep 30 20:13:49 crc kubenswrapper[4919]: I0930 20:13:49.947881 4919 trace.go:236] Trace[2093908739]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 20:13:39.946) (total time: 10001ms): Sep 30 20:13:49 crc kubenswrapper[4919]: Trace[2093908739]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (20:13:49.947) Sep 30 20:13:49 crc kubenswrapper[4919]: Trace[2093908739]: [10.001764181s] [10.001764181s] END Sep 30 20:13:49 crc kubenswrapper[4919]: E0930 20:13:49.947924 4919 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.204352 4919 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.204437 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.233967 4919 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver 
namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.234026 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.764873 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.768804 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177"} Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.769059 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.770466 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.770525 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:50 crc kubenswrapper[4919]: I0930 20:13:50.770546 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:51 crc kubenswrapper[4919]: I0930 20:13:51.124770 4919 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 20:13:51 crc kubenswrapper[4919]: I0930 20:13:51.124896 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 20:13:53 crc kubenswrapper[4919]: I0930 20:13:53.101523 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:53 crc kubenswrapper[4919]: I0930 20:13:53.101813 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:13:53 crc kubenswrapper[4919]: I0930 20:13:53.103664 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:13:53 crc kubenswrapper[4919]: I0930 20:13:53.103749 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:13:53 crc kubenswrapper[4919]: I0930 20:13:53.103772 4919 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.089968 4919 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.549801 4919 apiserver.go:52] "Watching apiserver" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.557857 4919 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.558441 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.559045 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.559301 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.559419 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:13:54 crc kubenswrapper[4919]: E0930 20:13:54.559470 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.559556 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:54 crc kubenswrapper[4919]: E0930 20:13:54.559617 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.559699 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.559756 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:54 crc kubenswrapper[4919]: E0930 20:13:54.559912 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.562862 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.562896 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.563118 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.563321 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.563510 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.563534 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.563997 4919 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.564587 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.564630 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.565144 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.602123 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.620415 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.641064 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.656109 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.667210 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.674973 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.677522 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.685095 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.697816 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.714826 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.728440 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.742947 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.756593 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.775894 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.788444 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.804939 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.821555 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.837012 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:54 crc kubenswrapper[4919]: I0930 20:13:54.857699 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.213032 4919 trace.go:236] Trace[2094260004]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 20:13:43.123) (total time: 12089ms): Sep 30 20:13:55 crc kubenswrapper[4919]: Trace[2094260004]: ---"Objects listed" error: 12089ms (20:13:55.212) Sep 30 20:13:55 crc kubenswrapper[4919]: Trace[2094260004]: [12.089343633s] [12.089343633s] END Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.213080 4919 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.214055 4919 trace.go:236] Trace[1849467189]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 20:13:42.885) (total time: 12328ms): Sep 30 20:13:55 crc kubenswrapper[4919]: Trace[1849467189]: ---"Objects listed" error: 12328ms (20:13:55.213) Sep 30 20:13:55 crc kubenswrapper[4919]: Trace[1849467189]: [12.328716973s] [12.328716973s] END Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.214111 4919 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.215487 4919 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.217373 4919 trace.go:236] Trace[707112582]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (30-Sep-2025 20:13:44.352) (total time: 10864ms): Sep 30 20:13:55 crc kubenswrapper[4919]: Trace[707112582]: ---"Objects listed" error: 10864ms (20:13:55.217) Sep 30 20:13:55 crc kubenswrapper[4919]: Trace[707112582]: [10.864272968s] [10.864272968s] END Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.217429 4919 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.217920 4919 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.223018 4919 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316454 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316524 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 
20:13:55.316559 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316591 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316627 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316660 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316691 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316747 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316781 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316814 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316844 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316874 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316903 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316935 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.316969 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317016 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317045 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317082 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317113 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317142 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317171 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317202 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317256 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317287 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317337 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317373 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317440 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317481 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317510 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317541 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317573 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317603 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: 
\"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317633 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317666 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317703 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317745 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317776 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317829 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317882 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317915 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317946 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.317978 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: 
\"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318015 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318048 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318080 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318111 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318153 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318183 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318238 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318273 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318304 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318338 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" 
(UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318369 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318400 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318431 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318465 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318496 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318531 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318563 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318596 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318629 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318660 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318693 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318725 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318756 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318788 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318822 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318854 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318890 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318922 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.318952 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 20:13:55 crc 
kubenswrapper[4919]: I0930 20:13:55.318986 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319050 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319086 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319119 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319150 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319182 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319246 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319277 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319308 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319342 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 20:13:55 crc 
kubenswrapper[4919]: I0930 20:13:55.319385 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319432 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319473 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319505 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319536 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319569 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319600 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319633 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319668 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319701 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc 
kubenswrapper[4919]: I0930 20:13:55.319733 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319765 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319797 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319828 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319859 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319892 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319926 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.319985 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320018 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320051 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 
20:13:55.320084 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320116 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320149 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320180 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320241 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320275 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320308 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320344 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320376 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320409 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320443 4919 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320477 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320512 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320550 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320587 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320625 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320658 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320669 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320692 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320772 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320802 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320808 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320834 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320865 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320892 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320919 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320943 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320966 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.320991 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321014 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321010 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321036 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321058 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321080 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321101 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321125 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321147 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321170 4919 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321193 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321237 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321260 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321247 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321276 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321330 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321278 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321524 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321812 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321917 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321962 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.321290 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322125 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322157 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322190 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322235 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322266 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: 
I0930 20:13:55.322289 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322312 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322336 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322360 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322384 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322409 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322432 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322456 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322481 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322503 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322526 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322549 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322573 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322595 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322620 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322650 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322674 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322698 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322721 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322746 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: 
\"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322767 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322789 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322811 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322836 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322858 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322888 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322911 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322934 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322956 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322978 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322999 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323053 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323077 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323102 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323129 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323154 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323176 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323228 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323253 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323276 
4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323299 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323324 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323345 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323367 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323391 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323415 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323437 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323462 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323487 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 30 
20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323571 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323622 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323652 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323678 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323707 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323735 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323759 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323785 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323810 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: 
\"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323841 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323864 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323893 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323918 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323943 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323969 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324032 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324049 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324064 4919 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324080 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324094 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324107 4919 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324119 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324133 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324148 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324162 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324176 4919 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322053 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322051 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322144 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322303 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322333 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322439 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322590 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.322678 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323096 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323155 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323448 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323730 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.323907 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324403 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324415 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.326358 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.326727 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.327055 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.327224 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.327417 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.327881 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.327919 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.327890 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324256 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.324289 4919 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.328029 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:55.82800739 +0000 UTC m=+20.944040527 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.328408 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.328723 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.328765 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.324249 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.329130 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.329287 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.329767 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.330049 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.330055 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.330121 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.330170 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.330476 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.330514 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.331245 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.331196 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.331401 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.331593 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.331826 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.332141 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.332434 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.332523 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.332547 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.332551 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.332765 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.333299 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.333344 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.333359 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.334509 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.334684 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.335152 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.335191 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.335283 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.335784 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.335843 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.336097 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.336528 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.336532 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.337269 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.337540 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.337606 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.337934 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.337971 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.338307 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.338885 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.338943 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.339857 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.339701 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.340066 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.340063 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.340336 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.340526 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.340785 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.340952 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.341093 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.341153 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.341383 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.342332 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.342402 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.342687 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.342887 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.342906 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.342982 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.343559 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.343583 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.343820 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.344660 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345157 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345274 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345434 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345458 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345476 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.345542 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:13:55.845516477 +0000 UTC m=+20.961549614 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345611 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345652 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.345808 4919 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.346048 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.346113 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:55.846081823 +0000 UTC m=+20.962114950 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.346310 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.346518 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.346680 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.346701 4919 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.346926 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.347038 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.347058 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.347103 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.347285 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.347473 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345875 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.346476 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.348143 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.348384 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.345820 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.348583 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.348685 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.349334 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.349399 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.349437 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.350754 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.351276 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.352314 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.352853 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.353287 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.353571 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.353663 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.353774 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.354324 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.354865 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.355305 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.357049 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.357151 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.358491 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.358587 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.363071 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.363520 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.364053 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.364436 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.364714 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.364758 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.364776 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.364805 4919 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.364916 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:55.864882368 +0000 UTC m=+20.980915535 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.365044 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.365075 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.365123 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.367427 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.367828 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.367927 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.367935 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.348152 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.369111 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.369188 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.369612 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.369638 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.369656 4919 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.369716 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:55.869695297 +0000 UTC m=+20.985728434 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.370745 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.371187 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.371430 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.371459 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.371530 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.372049 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.372864 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.372963 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.374097 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.374500 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.377091 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.377544 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.377784 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.378196 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.378738 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.379487 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.379492 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.379759 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.379878 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.380911 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.380952 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.381805 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.382129 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.382197 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.382328 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.382501 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.382976 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.383724 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.383592 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.384323 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.384471 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.384558 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.384561 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.384728 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.384874 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.384942 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.384951 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.385154 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.385330 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.386054 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.386417 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.410967 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.411392 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.423002 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424768 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424810 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424888 4919 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424904 4919 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424917 4919 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424930 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424942 4919 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424954 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424951 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.424966 4919 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425076 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425106 4919 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425122 4919 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425134 4919 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425149 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425159 4919 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425186 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425196 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425205 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425234 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425243 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425253 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425263 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425273 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425285 4919 reconciler_common.go:293] "Volume 
detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425298 4919 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425308 4919 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425318 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425327 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425336 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425347 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425356 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425368 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425378 4919 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425388 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425398 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425409 4919 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425418 4919 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425427 4919 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425437 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425448 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425458 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425467 4919 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425477 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425488 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425498 4919 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425508 4919 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425517 4919 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425530 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425540 4919 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425550 4919 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425559 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425570 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425580 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425592 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425602 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425614 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425626 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425637 4919 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425648 4919 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425658 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425668 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425678 4919 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 
20:13:55.425687 4919 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425696 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425706 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425717 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425725 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425734 4919 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425745 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425756 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425766 4919 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425776 4919 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425786 4919 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425797 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425806 4919 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425818 
4919 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425828 4919 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425839 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425849 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425861 4919 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425872 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425884 4919 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425895 4919 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425906 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425916 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425925 4919 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425935 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425945 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc 
kubenswrapper[4919]: I0930 20:13:55.425957 4919 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425968 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425978 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.425992 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426002 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426012 4919 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426024 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426034 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426048 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426060 4919 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426070 4919 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426081 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426093 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on 
node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426407 4919 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426418 4919 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426427 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426436 4919 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426446 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426455 4919 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426463 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426472 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426523 4919 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426534 4919 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426546 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426556 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426567 4919 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node 
\"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426578 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426616 4919 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426626 4919 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426634 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426643 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426652 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426689 4919 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426700 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426709 4919 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426719 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426728 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426736 4919 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426745 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426753 4919 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426761 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426770 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426779 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426787 4919 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426797 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426833 4919 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426842 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426851 4919 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426859 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426868 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426876 4919 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426902 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node 
\"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426912 4919 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426921 4919 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426929 4919 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426937 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426946 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426954 4919 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426962 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426971 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426979 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426987 4919 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.426996 4919 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427004 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427013 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 30 20:13:55 crc 
kubenswrapper[4919]: I0930 20:13:55.427022 4919 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427030 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427038 4919 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427048 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427056 4919 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427067 4919 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427075 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427083 4919 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427093 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427102 4919 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427110 4919 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427119 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427127 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427135 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427143 4919 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427152 4919 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427160 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427167 4919 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427175 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427183 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427192 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427201 4919 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427264 4919 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427403 4919 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427412 4919 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427420 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427428 4919 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427436 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427444 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427452 4919 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427461 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427470 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.427477 4919 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.475058 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.482266 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.486299 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 30 20:13:55 crc kubenswrapper[4919]: W0930 20:13:55.498594 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-7d7cb4833b31a43448aa228023e4476783f25d3d887a58c05699997638215772 WatchSource:0}: Error finding container 7d7cb4833b31a43448aa228023e4476783f25d3d887a58c05699997638215772: Status 404 returned error can't find the container with id 7d7cb4833b31a43448aa228023e4476783f25d3d887a58c05699997638215772
Sep 30 20:13:55 crc kubenswrapper[4919]: W0930 20:13:55.502519 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-29826468f802fbde68e9516afc8d20665db2d5ac0178bd87af44f1ca4c14ad90 WatchSource:0}: Error finding container 29826468f802fbde68e9516afc8d20665db2d5ac0178bd87af44f1ca4c14ad90: Status 404 returned error can't find the container with id 29826468f802fbde68e9516afc8d20665db2d5ac0178bd87af44f1ca4c14ad90
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.640489 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.641911 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.643737 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.645188 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.645416 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.646877 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.649148 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.650070 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.652153 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.653565 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.657034 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.658259 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.660439 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.664157 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.665197 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.666438 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.669126 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.669718 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.670711 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.671084 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.671649 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.672605 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.673029 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.673965 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.674959 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.676038 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.676527 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.677254 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.678037 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.679931 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.680559 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.681835 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.682388 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.683429 4919 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.683544 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.685198 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.685683 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.686590 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.689311 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.690659 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.691409 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.693499 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.694400 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.695473 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.695640 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.696176 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.697245 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.698347 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.698832 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.699370 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.700243 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.701517 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.702014 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.702685 4919 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.703655 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.704350 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.705250 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.705447 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.705963 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.714779 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.723668 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.785896 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5"} Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.785967 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"62a4107f0dd10668e6aae7197099638d680845460518626e05ab955be0e1c175"} Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.788190 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e"} Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.788263 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397"} Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.788275 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"29826468f802fbde68e9516afc8d20665db2d5ac0178bd87af44f1ca4c14ad90"} Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.789076 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7d7cb4833b31a43448aa228023e4476783f25d3d887a58c05699997638215772"} Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.798032 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.811417 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.821360 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.830349 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.830603 4919 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.830705 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:56.830682853 +0000 UTC m=+21.946715970 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.832641 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f
36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.845538 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.856335 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.874297 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.886224 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.896459 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.909249 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.919370 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.928860 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.931171 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.931384 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:13:56.931341357 +0000 UTC m=+22.047374494 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.931500 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.931579 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.931623 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.931831 4919 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.931911 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:56.931888943 +0000 UTC m=+22.047922070 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.932256 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.932257 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.932278 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.932288 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.932293 4919 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.932304 4919 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.932325 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:56.932316475 +0000 UTC m=+22.048349602 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:55 crc kubenswrapper[4919]: E0930 20:13:55.932348 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:56.932334636 +0000 UTC m=+22.048367773 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.946257 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:55 crc kubenswrapper[4919]: I0930 20:13:55.959150 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 30 20:13:56 crc kubenswrapper[4919]: I0930 20:13:56.631840 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:56 crc kubenswrapper[4919]: I0930 20:13:56.632015 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:13:56 crc kubenswrapper[4919]: I0930 20:13:56.631862 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.632033 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.632356 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.632853 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:13:56 crc kubenswrapper[4919]: I0930 20:13:56.840090 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.840237 4919 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.840286 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:58.840272401 +0000 UTC m=+23.956305528 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:13:56 crc kubenswrapper[4919]: I0930 20:13:56.940727 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941021 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:13:58.940960976 +0000 UTC m=+24.056994093 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:13:56 crc kubenswrapper[4919]: I0930 20:13:56.941120 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:56 crc kubenswrapper[4919]: I0930 20:13:56.941249 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:13:56 crc kubenswrapper[4919]: I0930 20:13:56.941291 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941456 4919 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941530 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941562 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941576 4919 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941578 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941594 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:58.941568093 +0000 UTC m=+24.057601220 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941630 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941637 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:58.941629515 +0000 UTC m=+24.057662642 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941649 4919 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:56 crc kubenswrapper[4919]: E0930 20:13:56.941740 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 20:13:58.941702607 +0000 UTC m=+24.057735734 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.323025 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-5xx2l"] Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.323509 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-5xx2l" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.325603 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.325902 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.330528 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.354148 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.368499 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z"
Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.383968 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z"
Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.398086 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.411764 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.429436 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.446360 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/02a9256e-b65e-4ed6-877b-27c4fa0d3339-hosts-file\") pod \"node-resolver-5xx2l\" (UID: \"02a9256e-b65e-4ed6-877b-27c4fa0d3339\") " pod="openshift-dns/node-resolver-5xx2l" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.446432 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62wnk\" (UniqueName: \"kubernetes.io/projected/02a9256e-b65e-4ed6-877b-27c4fa0d3339-kube-api-access-62wnk\") pod \"node-resolver-5xx2l\" (UID: \"02a9256e-b65e-4ed6-877b-27c4fa0d3339\") " pod="openshift-dns/node-resolver-5xx2l" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.448303 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 
20:13:57.462446 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.547062 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/02a9256e-b65e-4ed6-877b-27c4fa0d3339-hosts-file\") pod \"node-resolver-5xx2l\" (UID: \"02a9256e-b65e-4ed6-877b-27c4fa0d3339\") " pod="openshift-dns/node-resolver-5xx2l" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.547148 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62wnk\" (UniqueName: \"kubernetes.io/projected/02a9256e-b65e-4ed6-877b-27c4fa0d3339-kube-api-access-62wnk\") pod \"node-resolver-5xx2l\" (UID: \"02a9256e-b65e-4ed6-877b-27c4fa0d3339\") " pod="openshift-dns/node-resolver-5xx2l" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.547166 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: 
\"kubernetes.io/host-path/02a9256e-b65e-4ed6-877b-27c4fa0d3339-hosts-file\") pod \"node-resolver-5xx2l\" (UID: \"02a9256e-b65e-4ed6-877b-27c4fa0d3339\") " pod="openshift-dns/node-resolver-5xx2l" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.570983 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62wnk\" (UniqueName: \"kubernetes.io/projected/02a9256e-b65e-4ed6-877b-27c4fa0d3339-kube-api-access-62wnk\") pod \"node-resolver-5xx2l\" (UID: \"02a9256e-b65e-4ed6-877b-27c4fa0d3339\") " pod="openshift-dns/node-resolver-5xx2l" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.633980 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-5xx2l" Sep 30 20:13:57 crc kubenswrapper[4919]: W0930 20:13:57.647124 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02a9256e_b65e_4ed6_877b_27c4fa0d3339.slice/crio-cd7de1b7dde47bbef1ebfb35677addef4c5ffa2451e57f54f1acbc14f4d920b2 WatchSource:0}: Error finding container cd7de1b7dde47bbef1ebfb35677addef4c5ffa2451e57f54f1acbc14f4d920b2: Status 404 returned error can't find the container with id cd7de1b7dde47bbef1ebfb35677addef4c5ffa2451e57f54f1acbc14f4d920b2 Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.725980 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-s6g9s"] Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.727252 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.730301 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-p4zv6"] Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.730812 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-c5crr"] Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.730962 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.731107 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.732196 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.732915 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.742547 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.743056 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.743177 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.743261 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.743486 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.743536 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.743624 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.743889 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.743941 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.745382 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.776359 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.799336 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-5xx2l" event={"ID":"02a9256e-b65e-4ed6-877b-27c4fa0d3339","Type":"ContainerStarted","Data":"cd7de1b7dde47bbef1ebfb35677addef4c5ffa2451e57f54f1acbc14f4d920b2"} Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.805138 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.820390 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.836356 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.848944 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-k8s-cni-cncf-io\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849023 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-daemon-config\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849051 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bh9c9\" (UniqueName: \"kubernetes.io/projected/e3e33a72-0a49-4944-a2c2-ac16183942cf-kube-api-access-bh9c9\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849077 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a1ab1b2e-3bf7-4956-9042-66429245b189-cni-binary-copy\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849102 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-cni-dir\") pod \"multus-c5crr\" (UID: 
\"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849137 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-hostroot\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849176 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-tuning-conf-dir\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849197 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a1ab1b2e-3bf7-4956-9042-66429245b189-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849253 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-multus-certs\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849276 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-etc-kubernetes\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849299 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-os-release\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849319 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e3e33a72-0a49-4944-a2c2-ac16183942cf-cni-binary-copy\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849349 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-conf-dir\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849371 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eb371a63-6d82-453e-930e-656710b97f10-rootfs\") 
pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849395 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-cnibin\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849418 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-socket-dir-parent\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849444 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-cnibin\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849467 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-os-release\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849490 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-system-cni-dir\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849511 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb371a63-6d82-453e-930e-656710b97f10-mcd-auth-proxy-config\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849545 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-netns\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849569 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-cni-multus\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849595 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-system-cni-dir\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849619 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-kubelet\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849710 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dldw7\" (UniqueName: \"kubernetes.io/projected/a1ab1b2e-3bf7-4956-9042-66429245b189-kube-api-access-dldw7\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849804 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-cni-bin\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849839 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mp5r\" (UniqueName: \"kubernetes.io/projected/eb371a63-6d82-453e-930e-656710b97f10-kube-api-access-6mp5r\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.849864 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb371a63-6d82-453e-930e-656710b97f10-proxy-tls\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.853306 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.867850 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.878940 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.891476 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.909149 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.922747 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.943815 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951110 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e3e33a72-0a49-4944-a2c2-ac16183942cf-cni-binary-copy\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951139 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-multus-certs\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951156 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-etc-kubernetes\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951174 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-os-release\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951199 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-conf-dir\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951234 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eb371a63-6d82-453e-930e-656710b97f10-rootfs\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951256 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-cnibin\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951276 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-socket-dir-parent\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951291 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-cnibin\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951306 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-os-release\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951323 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb371a63-6d82-453e-930e-656710b97f10-mcd-auth-proxy-config\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951341 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-system-cni-dir\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951364 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-netns\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951370 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-multus-certs\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951405 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-cni-multus\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951382 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-cni-multus\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951430 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-conf-dir\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951447 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-system-cni-dir\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951478 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-cnibin\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951478 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-kubelet\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951506 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-socket-dir-parent\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 
crc kubenswrapper[4919]: I0930 20:13:57.951526 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-cnibin\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951522 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dldw7\" (UniqueName: \"kubernetes.io/projected/a1ab1b2e-3bf7-4956-9042-66429245b189-kube-api-access-dldw7\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951549 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-cni-bin\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951565 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb371a63-6d82-453e-930e-656710b97f10-proxy-tls\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951583 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mp5r\" (UniqueName: \"kubernetes.io/projected/eb371a63-6d82-453e-930e-656710b97f10-kube-api-access-6mp5r\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951611 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-system-cni-dir\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951631 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-k8s-cni-cncf-io\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951617 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-k8s-cni-cncf-io\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951653 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-run-netns\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951519 4919 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-os-release\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951671 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bh9c9\" (UniqueName: \"kubernetes.io/projected/e3e33a72-0a49-4944-a2c2-ac16183942cf-kube-api-access-bh9c9\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951708 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-daemon-config\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951742 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a1ab1b2e-3bf7-4956-9042-66429245b189-cni-binary-copy\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951772 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-cni-dir\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951802 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-hostroot\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951835 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a1ab1b2e-3bf7-4956-9042-66429245b189-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951849 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-system-cni-dir\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951451 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/eb371a63-6d82-453e-930e-656710b97f10-rootfs\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951888 4919 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-tuning-conf-dir\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951922 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-kubelet\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.952098 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e3e33a72-0a49-4944-a2c2-ac16183942cf-cni-binary-copy\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.952260 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-os-release\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951563 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-etc-kubernetes\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.952686 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a1ab1b2e-3bf7-4956-9042-66429245b189-tuning-conf-dir\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.952757 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb371a63-6d82-453e-930e-656710b97f10-mcd-auth-proxy-config\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.952835 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-hostroot\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.951677 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-host-var-lib-cni-bin\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.952837 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-daemon-config\") pod \"multus-c5crr\" (UID: 
\"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.953099 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e3e33a72-0a49-4944-a2c2-ac16183942cf-multus-cni-dir\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.953680 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a1ab1b2e-3bf7-4956-9042-66429245b189-cni-binary-copy\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.953711 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a1ab1b2e-3bf7-4956-9042-66429245b189-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.956948 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb371a63-6d82-453e-930e-656710b97f10-proxy-tls\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.960856 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.970843 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mp5r\" (UniqueName: \"kubernetes.io/projected/eb371a63-6d82-453e-930e-656710b97f10-kube-api-access-6mp5r\") pod \"machine-config-daemon-p4zv6\" (UID: \"eb371a63-6d82-453e-930e-656710b97f10\") " pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.974522 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dldw7\" (UniqueName: \"kubernetes.io/projected/a1ab1b2e-3bf7-4956-9042-66429245b189-kube-api-access-dldw7\") pod \"multus-additional-cni-plugins-s6g9s\" (UID: \"a1ab1b2e-3bf7-4956-9042-66429245b189\") " pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.975712 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.977918 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bh9c9\" (UniqueName: \"kubernetes.io/projected/e3e33a72-0a49-4944-a2c2-ac16183942cf-kube-api-access-bh9c9\") pod \"multus-c5crr\" (UID: \"e3e33a72-0a49-4944-a2c2-ac16183942cf\") " pod="openshift-multus/multus-c5crr" Sep 30 20:13:57 crc kubenswrapper[4919]: I0930 20:13:57.991414 4919 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:57Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.006338 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.021932 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.040436 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.041602 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-pl
ugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a7
14c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.048016 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:13:58 crc kubenswrapper[4919]: W0930 20:13:58.052144 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1ab1b2e_3bf7_4956_9042_66429245b189.slice/crio-80de503f3812d55b3ccdf01d2627e0089a99fb76a175ddd3127a495303f3b292 WatchSource:0}: Error finding container 80de503f3812d55b3ccdf01d2627e0089a99fb76a175ddd3127a495303f3b292: Status 404 returned error can't find the container with id 80de503f3812d55b3ccdf01d2627e0089a99fb76a175ddd3127a495303f3b292 Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.054282 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-c5crr" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.062857 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.074969 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is 
after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.090530 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.107780 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4p25c"] Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.109008 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.118830 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.119034 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.119146 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.119254 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.119287 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.119466 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.119156 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.129835 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.134088 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.135056 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.141315 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.153160 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.171291 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.187396 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.210632 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.226242 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.247252 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255067 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-var-lib-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255107 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-ovn-kubernetes\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255130 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-kubelet\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255150 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-script-lib\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255165 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-log-socket\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255319 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-slash\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255377 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-systemd-units\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255406 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255431 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-env-overrides\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255453 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-netd\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255473 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af48d482-2587-4521-ba91-56d35b0e487d-ovn-node-metrics-cert\") pod 
\"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255508 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-etc-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255530 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-bin\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255559 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-netns\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255582 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255606 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-config\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255627 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-ovn\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255647 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-node-log\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255704 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-systemd\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.255738 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jx9g\" 
(UniqueName: \"kubernetes.io/projected/af48d482-2587-4521-ba91-56d35b0e487d-kube-api-access-9jx9g\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.272667 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.293485 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.309456 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.323503 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is 
after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.337628 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356742 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-netd\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356786 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af48d482-2587-4521-ba91-56d35b0e487d-ovn-node-metrics-cert\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356804 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-bin\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356829 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-etc-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356852 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356873 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-netns\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356890 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-config\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356886 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-netd\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356960 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-ovn\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356961 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-bin\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356907 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-ovn\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.356982 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-etc-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 
20:13:58.357006 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-node-log\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357030 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357050 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-systemd\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357063 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-netns\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357077 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jx9g\" (UniqueName: \"kubernetes.io/projected/af48d482-2587-4521-ba91-56d35b0e487d-kube-api-access-9jx9g\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357095 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-kubelet\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357112 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-var-lib-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357127 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-ovn-kubernetes\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357148 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-script-lib\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357165 4919 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-log-socket\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357193 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-systemd-units\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357221 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-slash\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357249 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357269 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-env-overrides\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357416 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-var-lib-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357484 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-node-log\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357517 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-systemd\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357788 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-kubelet\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357808 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-env-overrides\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357826 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-slash\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357829 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-log-socket\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357866 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-config\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357832 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-systemd-units\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357850 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-openvswitch\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.357905 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-ovn-kubernetes\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.358427 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-script-lib\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.361504 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.361858 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af48d482-2587-4521-ba91-56d35b0e487d-ovn-node-metrics-cert\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.375988 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jx9g\" (UniqueName: \"kubernetes.io/projected/af48d482-2587-4521-ba91-56d35b0e487d-kube-api-access-9jx9g\") pod \"ovnkube-node-4p25c\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.397816 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.411744 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.433108 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.445887 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.458598 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.471572 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.482827 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.495037 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.523819 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.529371 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.538890 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: W0930 20:13:58.542207 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf48d482_2587_4521_ba91_56d35b0e487d.slice/crio-d4fcb3b0835be6a2d0e8731449093aefd3e1f127203cadd09a92a3eb443ec0f9 WatchSource:0}: Error finding container d4fcb3b0835be6a2d0e8731449093aefd3e1f127203cadd09a92a3eb443ec0f9: Status 404 returned error can't find the container with id d4fcb3b0835be6a2d0e8731449093aefd3e1f127203cadd09a92a3eb443ec0f9 Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.550462 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.568044 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.631623 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.631657 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.631636 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.631816 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.631927 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.632099 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.805036 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.806764 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341" exitCode=0 Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.806835 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.806872 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"d4fcb3b0835be6a2d0e8731449093aefd3e1f127203cadd09a92a3eb443ec0f9"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.808539 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.808575 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.808629 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"27988a6fa297597db932913ad2bffcb5cb6bb0ab2f6975fcab77c61dab9e487c"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.810303 4919 generic.go:334] "Generic (PLEG): container finished" podID="a1ab1b2e-3bf7-4956-9042-66429245b189" containerID="059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de" exitCode=0 Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.810392 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" event={"ID":"a1ab1b2e-3bf7-4956-9042-66429245b189","Type":"ContainerDied","Data":"059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.810448 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" event={"ID":"a1ab1b2e-3bf7-4956-9042-66429245b189","Type":"ContainerStarted","Data":"80de503f3812d55b3ccdf01d2627e0089a99fb76a175ddd3127a495303f3b292"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.811773 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-5xx2l" 
event={"ID":"02a9256e-b65e-4ed6-877b-27c4fa0d3339","Type":"ContainerStarted","Data":"feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.813156 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c5crr" event={"ID":"e3e33a72-0a49-4944-a2c2-ac16183942cf","Type":"ContainerStarted","Data":"503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.813200 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c5crr" event={"ID":"e3e33a72-0a49-4944-a2c2-ac16183942cf","Type":"ContainerStarted","Data":"3b2ed22f033b8ab0d13bee3b8abaf6025b137dbf6eeac36d9e5f43421d729de1"} Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.836714 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.862874 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.863280 4919 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.863375 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:02.863352199 +0000 UTC m=+27.979385326 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.866123 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.915189 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.949185 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.963398 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 
30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.963552 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.963585 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.963613 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.963698 4919 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.963749 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:02.963733915 +0000 UTC m=+28.079767032 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964068 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964089 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964100 4919 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964136 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:14:02.964099145 +0000 UTC m=+28.080132412 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964145 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964222 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964237 4919 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964187 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:02.964175348 +0000 UTC m=+28.080208685 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:58 crc kubenswrapper[4919]: E0930 20:13:58.964287 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:02.964277881 +0000 UTC m=+28.080311008 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.964720 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/o
penshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.982918 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:58 crc kubenswrapper[4919]: I0930 20:13:58.996036 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:58Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.010084 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.023133 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.035950 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.062024 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.081964 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.092835 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.106150 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd7
91fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.122923 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.137356 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.149237 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.164771 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.182992 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.199075 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.212952 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.227744 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.240655 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.254528 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.270649 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.289536 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z 
is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.746320 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.761597 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.765621 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.768063 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.784300 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.800506 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 
20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.823180 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"} Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.823354 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.823439 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"} Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.823619 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"} Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.823633 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"} Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.823646 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"} Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.825701 4919 generic.go:334] "Generic (PLEG): container finished" podID="a1ab1b2e-3bf7-4956-9042-66429245b189" containerID="a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc" exitCode=0 Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.825936 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" event={"ID":"a1ab1b2e-3bf7-4956-9042-66429245b189","Type":"ContainerDied","Data":"a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc"} Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.856364 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.873303 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.904053 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.916362 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.928907 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.947025 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc 
kubenswrapper[4919]: I0930 20:13:59.961978 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.976279 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:13:59 crc kubenswrapper[4919]: I0930 20:13:59.992274 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:13:59Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.006628 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.027006 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z 
is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.044037 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.057528 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.074019 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.088199 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.106958 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.121438 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.133194 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864
384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.148801 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.168888 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageI
D\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.182175 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.191659 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.203387 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.400567 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-cdffv"] Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.401061 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-cdffv"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.403080 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.403125 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.403999 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.404083 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.416761 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.429819 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.447958 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\"
:0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.464876 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.476431 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.481540 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/bef69394-3e21-4893-a952-1a0e1817e00f-serviceca\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.481605 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bef69394-3e21-4893-a952-1a0e1817e00f-host\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.481778 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-td7xg\" (UniqueName: \"kubernetes.io/projected/bef69394-3e21-4893-a952-1a0e1817e00f-kube-api-access-td7xg\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.491408 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.518498 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.559058 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.582799 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-td7xg\" (UniqueName: \"kubernetes.io/projected/bef69394-3e21-4893-a952-1a0e1817e00f-kube-api-access-td7xg\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.582885 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/bef69394-3e21-4893-a952-1a0e1817e00f-serviceca\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.582937 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bef69394-3e21-4893-a952-1a0e1817e00f-host\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.583015 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bef69394-3e21-4893-a952-1a0e1817e00f-host\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.584203 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/bef69394-3e21-4893-a952-1a0e1817e00f-serviceca\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv"
Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.600264 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.631574 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.631712 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:00 crc kubenswrapper[4919]: E0930 20:14:00.631836 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.631855 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:00 crc kubenswrapper[4919]: E0930 20:14:00.631984 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:00 crc kubenswrapper[4919]: E0930 20:14:00.632069 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.633821 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-td7xg\" (UniqueName: \"kubernetes.io/projected/bef69394-3e21-4893-a952-1a0e1817e00f-kube-api-access-td7xg\") pod \"node-ca-cdffv\" (UID: \"bef69394-3e21-4893-a952-1a0e1817e00f\") " pod="openshift-image-registry/node-ca-cdffv" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.664768 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z 
is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.703861 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.714675 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-cdffv" Sep 30 20:14:00 crc kubenswrapper[4919]: W0930 20:14:00.731466 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbef69394_3e21_4893_a952_1a0e1817e00f.slice/crio-f48ee660fbe605d730ad88aa4b3cdc5e5be348d5c0d395ef405a3bb86ec0c907 WatchSource:0}: Error finding container f48ee660fbe605d730ad88aa4b3cdc5e5be348d5c0d395ef405a3bb86ec0c907: Status 404 returned error can't find the container with id f48ee660fbe605d730ad88aa4b3cdc5e5be348d5c0d395ef405a3bb86ec0c907 Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.742084 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.777820 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.817327 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.843906 4919 generic.go:334] "Generic (PLEG): container finished" podID="a1ab1b2e-3bf7-4956-9042-66429245b189" containerID="6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1" exitCode=0 Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.844284 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" event={"ID":"a1ab1b2e-3bf7-4956-9042-66429245b189","Type":"ContainerDied","Data":"6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1"} Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.846794 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-cdffv" event={"ID":"bef69394-3e21-4893-a952-1a0e1817e00f","Type":"ContainerStarted","Data":"f48ee660fbe605d730ad88aa4b3cdc5e5be348d5c0d395ef405a3bb86ec0c907"} Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.858178 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"} Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.865760 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.899896 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\
\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.939291 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:00 crc kubenswrapper[4919]: I0930 20:14:00.982620 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:00Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.018181 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.060926 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.104902 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z 
is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.143095 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.179926 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.219695 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.261615 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.304473 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.339093 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.377917 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.424475 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\"
:0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.463738 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.623366 4919 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.626700 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.626756 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.626767 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.626920 4919 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.634550 4919 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.635053 4919 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.636601 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.636639 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.636649 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.636661 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.636672 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:01Z","lastTransitionTime":"2025-09-30T20:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:01 crc kubenswrapper[4919]: E0930 20:14:01.655245 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.659992 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.660042 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.660055 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.660074 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.660089 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:01Z","lastTransitionTime":"2025-09-30T20:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:01 crc kubenswrapper[4919]: E0930 20:14:01.674432 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.678895 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.678950 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.678963 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.678988 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.679003 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:01Z","lastTransitionTime":"2025-09-30T20:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:01 crc kubenswrapper[4919]: E0930 20:14:01.692578 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.697263 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.697332 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.697353 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.697383 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.697407 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:01Z","lastTransitionTime":"2025-09-30T20:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:01 crc kubenswrapper[4919]: E0930 20:14:01.725315 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.732202 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.732259 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.732272 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.732295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.732307 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:01Z","lastTransitionTime":"2025-09-30T20:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:01 crc kubenswrapper[4919]: E0930 20:14:01.782010 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: E0930 20:14:01.782147 4919 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.784389 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.784424 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.784436 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.784452 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.784697 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:01Z","lastTransitionTime":"2025-09-30T20:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.862714 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-cdffv" event={"ID":"bef69394-3e21-4893-a952-1a0e1817e00f","Type":"ContainerStarted","Data":"51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9"} Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.866099 4919 generic.go:334] "Generic (PLEG): container finished" podID="a1ab1b2e-3bf7-4956-9042-66429245b189" containerID="00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e" exitCode=0 Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.866149 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" event={"ID":"a1ab1b2e-3bf7-4956-9042-66429245b189","Type":"ContainerDied","Data":"00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e"} Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.881567 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.891293 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.891341 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.891354 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.891378 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.891391 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:01Z","lastTransitionTime":"2025-09-30T20:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.900904 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.913861 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.928292 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.941529 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.965303 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z 
is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.981089 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.995206 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.995257 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.995267 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.995283 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.995293 4919 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:01Z","lastTransitionTime":"2025-09-30T20:14:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:01 crc kubenswrapper[4919]: I0930 20:14:01.995481 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:01Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.010848 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.023488 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.037986 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.052626 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.063825 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 
20:14:02.084444 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\
\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.098852 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.098906 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.098918 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.098940 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.098953 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.108353 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.143569 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.185271 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.202869 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.202910 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.202919 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.202937 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.202950 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.227533 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.261518 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.300817 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.305956 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.306001 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.306014 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.306043 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.306055 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.344777 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.386633 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.409005 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.409055 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.409071 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.409094 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.409110 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.422066 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.465325 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.502723 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.519037 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.519098 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.519111 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.519134 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.519149 4919 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.548055 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.583780 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.622761 4919 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.622997 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.623142 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.623341 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.623548 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.623308 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.631462 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.631560 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:02 crc kubenswrapper[4919]: E0930 20:14:02.631644 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:02 crc kubenswrapper[4919]: E0930 20:14:02.631727 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.631982 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:02 crc kubenswrapper[4919]: E0930 20:14:02.632311 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.662838 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.701118 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.727418 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.727483 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.727495 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.727515 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.727529 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.830867 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.830927 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.830940 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.830963 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.830980 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.873942 4919 generic.go:334] "Generic (PLEG): container finished" podID="a1ab1b2e-3bf7-4956-9042-66429245b189" containerID="94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115" exitCode=0 Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.874016 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" event={"ID":"a1ab1b2e-3bf7-4956-9042-66429245b189","Type":"ContainerDied","Data":"94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.881331 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.898431 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.907431 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:02 crc kubenswrapper[4919]: E0930 20:14:02.907655 4919 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:14:02 crc kubenswrapper[4919]: E0930 20:14:02.907758 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:10.907734594 +0000 UTC m=+36.023767731 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.919761 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.934896 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.934958 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.934977 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.935001 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.935019 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:02Z","lastTransitionTime":"2025-09-30T20:14:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.942501 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.977525 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:02 crc kubenswrapper[4919]: I0930 20:14:02.996780 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 
20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:02Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.008948 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.009265 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.009395 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.009491 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010371 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010428 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010439 4919 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010497 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:14:11.010380796 +0000 UTC m=+36.126413963 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010568 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010589 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:11.010554341 +0000 UTC m=+36.126587488 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010599 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010620 4919 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010454 4919 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010715 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:11.010682374 +0000 UTC m=+36.126715591 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:03 crc kubenswrapper[4919]: E0930 20:14:03.010996 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-09-30 20:14:11.010909451 +0000 UTC m=+36.126942688 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.017430 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.039368 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.039752 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.039800 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.039817 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.039847 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.039867 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.060658 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.075154 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.105206 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.107602 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.145594 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.145785 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.145884 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.146188 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.146371 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.153464 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dld
w7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.190440 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.223824 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.248672 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.248728 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.248745 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.248768 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.248785 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.261265 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.305445 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.346569 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.353092 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.353151 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.353175 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.353210 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.353283 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.389910 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dld
w7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.437643 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.456287 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.456349 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.456373 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.456404 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.456427 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.466160 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.500416 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.544095 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.559426 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.559488 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.559511 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.559542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.559565 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.587393 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.637303 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z 
is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.662911 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.662959 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.662968 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.662986 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.662997 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.665512 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc47827
4c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.701679 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.742507 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.766141 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.766209 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.766266 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.766294 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.766312 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.783267 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.823842 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.864409 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.869896 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.869973 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.869998 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.870037 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.870062 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.890880 4919 generic.go:334] "Generic (PLEG): container finished" podID="a1ab1b2e-3bf7-4956-9042-66429245b189" containerID="af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30" exitCode=0 Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.890944 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" event={"ID":"a1ab1b2e-3bf7-4956-9042-66429245b189","Type":"ContainerDied","Data":"af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.902136 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.946072 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.974093 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.974164 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.974190 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.974263 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.974290 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:03Z","lastTransitionTime":"2025-09-30T20:14:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:03 crc kubenswrapper[4919]: I0930 20:14:03.987851 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb
985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.021559 4919 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.059136 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.076516 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.076567 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.076582 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.076603 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.076618 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.109521 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-3
0T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a174
0af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.140125 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.179882 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.179937 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.179950 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.179968 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.179979 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.182102 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.219572 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.259874 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.282263 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.282295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.282306 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.282321 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.282331 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.303102 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.344850 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.385237 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.385304 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.385346 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.385374 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.385388 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.386853 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1de
b9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.422241 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed2
9724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.470553 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.488843 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.488900 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.488917 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.488944 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.488964 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.503195 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.592019 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.592087 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.592106 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.592135 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.592156 4919 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.631521 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.631587 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.631527 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:04 crc kubenswrapper[4919]: E0930 20:14:04.631742 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:04 crc kubenswrapper[4919]: E0930 20:14:04.631972 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:04 crc kubenswrapper[4919]: E0930 20:14:04.632102 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.695557 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.695620 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.695644 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.695675 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.695698 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.798518 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.798574 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.798594 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.798621 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.798644 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.899851 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.900190 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.900268 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.900382 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.900414 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.900429 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.900450 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.900502 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:04Z","lastTransitionTime":"2025-09-30T20:14:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.906396 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" event={"ID":"a1ab1b2e-3bf7-4956-9042-66429245b189","Type":"ContainerStarted","Data":"71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d"} Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.936090 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.937104 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"ima
geID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.947325 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.958789 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://941
08f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:04 crc kubenswrapper[4919]: I0930 20:14:04.993756 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:04Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.002638 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.002679 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.002692 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.002712 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.002729 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.015477 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.031633 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.050506 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.067338 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.084316 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.107036 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.107100 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.107122 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.107148 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.107168 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.109098 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\"
,\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.133565 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fd
ecf321a015721046b9f41d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.156089 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.178577 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.198693 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.210206 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.210310 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.210329 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.210355 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.210372 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.219885 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.236119 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.261917 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.277990 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.290693 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.310999 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.314668 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.314733 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.314751 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.314779 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.314798 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.328697 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.347807 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.385073 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fd
ecf321a015721046b9f41d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.417564 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.417656 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.417671 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.417693 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.417708 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.422333 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.460549 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.502560 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.520524 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.520584 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.520594 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.520610 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.520620 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.538239 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.578425 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.617840 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.622877 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.622925 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.622937 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.622959 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.622969 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.660760 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.701953 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.725753 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.725806 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.725819 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.725840 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.725853 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.743290 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.784743 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.825906 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.828574 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.828689 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.828716 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.828750 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.828773 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.867498 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.902595 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.909593 4919 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.931295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.931340 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.931350 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.931369 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.931382 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:05Z","lastTransitionTime":"2025-09-30T20:14:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.940968 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:05 crc kubenswrapper[4919]: I0930 20:14:05.983119 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fd
ecf321a015721046b9f41d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.024977 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:06Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.034179 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.034254 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.034265 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.034286 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.034300 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.059742 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:06Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.102038 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:06Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.138884 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.138930 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.138944 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.138968 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.138983 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.144370 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:06Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.185922 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:06Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.219861 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:06Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.242165 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.242231 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.242246 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.242265 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.242278 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.266420 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:06Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.310450 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:06Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.345637 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.345842 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.345933 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.346026 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.346108 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.450071 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.450126 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.450136 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.450158 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.450170 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.554046 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.554120 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.554142 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.554169 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.554189 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.631319 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:06 crc kubenswrapper[4919]: E0930 20:14:06.631506 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.631801 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.631961 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:06 crc kubenswrapper[4919]: E0930 20:14:06.632145 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:06 crc kubenswrapper[4919]: E0930 20:14:06.632177 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.656824 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.657037 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.657141 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.657600 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.657704 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.760848 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.760886 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.760896 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.760913 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.760927 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.863820 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.863860 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.863876 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.863893 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.863908 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.915694 4919 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.967363 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.967443 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.967463 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.967575 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:06 crc kubenswrapper[4919]: I0930 20:14:06.967618 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:06Z","lastTransitionTime":"2025-09-30T20:14:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.071263 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.072113 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.072191 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.072296 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.072383 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.176332 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.176414 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.176434 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.176463 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.176486 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.279253 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.279318 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.279350 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.279377 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.279401 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.382284 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.382341 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.382360 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.382386 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.382404 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.486138 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.486197 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.486239 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.486294 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.486320 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.589979 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.590079 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.590104 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.590134 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.590276 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.693411 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.693466 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.693486 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.693510 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.693529 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.796871 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.797393 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.797583 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.797742 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.797886 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.901045 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.901096 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.901115 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.901141 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.901161 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:07Z","lastTransitionTime":"2025-09-30T20:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.922063 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/0.log" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.925646 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07" exitCode=1 Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.925712 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07"} Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.926594 4919 scope.go:117] "RemoveContainer" containerID="6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.951679 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\
\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:07Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.975457 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:07Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:07 crc kubenswrapper[4919]: I0930 20:14:07.998809 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:07Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.005619 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.005710 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.005729 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.005768 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.005788 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.038511 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-3
0T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a174
0af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.062801 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.084525 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.107207 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.109502 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.109560 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.109582 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.109608 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.109627 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.127010 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.146048 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.168001 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.202417 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:07Z\\\",\\\"message\\\":\\\"930 20:14:07.527497 6237 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 20:14:07.528917 6237 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 20:14:07.528952 6237 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 20:14:07.529012 6237 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 20:14:07.529083 6237 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 20:14:07.529124 6237 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:07.529131 6237 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:07.529205 6237 factory.go:656] Stopping watch factory\\\\nI0930 20:14:07.529269 6237 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:07.529312 6237 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 20:14:07.529330 6237 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:07.529341 6237 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:07.529352 6237 handler.go:208] Removed *v1.NetworkPolicy event handler 
4\\\\nI0930 20:14:07.529364 6237 handler.go:208] Removed *v1.Node event handler 2\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.213362 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.213429 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.213450 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.213478 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.213503 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.230893 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.251876 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.288556 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.305511 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.316919 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.316956 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.316969 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.316986 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.317001 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.419770 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.419852 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.419881 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.419917 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.419944 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.554171 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.554250 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.554260 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.554276 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.554286 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.631876 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.631951 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.631999 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:08 crc kubenswrapper[4919]: E0930 20:14:08.632022 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:08 crc kubenswrapper[4919]: E0930 20:14:08.632157 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:08 crc kubenswrapper[4919]: E0930 20:14:08.632314 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.657882 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.657936 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.657947 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.657970 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.657985 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.760442 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.760489 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.760500 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.760523 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.760536 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.864400 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.864465 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.864488 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.864514 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.864533 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.931090 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/0.log" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.934419 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.934591 4919 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.950229 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.967372 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.967432 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.967447 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.967466 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.967479 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:08Z","lastTransitionTime":"2025-09-30T20:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.970585 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:08 crc kubenswrapper[4919]: I0930 20:14:08.989340 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:08Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.013255 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f
777bf0bffe4ff6b833e3bc7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:07Z\\\",\\\"message\\\":\\\"930 20:14:07.527497 6237 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 20:14:07.528917 6237 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 20:14:07.528952 6237 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 20:14:07.529012 6237 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 20:14:07.529083 6237 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 20:14:07.529124 6237 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:07.529131 6237 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:07.529205 6237 factory.go:656] Stopping watch factory\\\\nI0930 20:14:07.529269 6237 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:07.529312 6237 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 20:14:07.529330 6237 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:07.529341 6237 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:07.529352 6237 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:07.529364 6237 handler.go:208] Removed *v1.Node event handler 
2\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.031846 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.049061 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.063470 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.069882 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.069950 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.069969 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.069996 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.070016 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.075417 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.089159 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.103438 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.124921 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.158178 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.173303 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.173375 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.173395 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.173431 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.173460 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.179474 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.195843 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.219593 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.275656 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.275700 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.275713 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.275732 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.275746 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.379120 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.379179 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.379192 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.379235 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.379250 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.482669 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.482731 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.482748 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.482772 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.482794 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.586337 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.586393 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.586406 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.586429 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.586444 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.690151 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.690196 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.690233 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.690256 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.690279 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.793449 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.793523 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.793541 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.793574 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.793591 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.896607 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.896659 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.896668 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.896688 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.896701 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.941079 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/1.log" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.942101 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/0.log" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.951812 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b" exitCode=1 Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.951913 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b"} Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.952015 4919 scope.go:117] "RemoveContainer" containerID="6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.953025 4919 scope.go:117] "RemoveContainer" containerID="a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b" Sep 30 20:14:09 crc kubenswrapper[4919]: E0930 20:14:09.953282 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.972979 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.990436 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:09Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.999517 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.999559 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.999573 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.999595 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:09 crc kubenswrapper[4919]: I0930 20:14:09.999608 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:09Z","lastTransitionTime":"2025-09-30T20:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.006722 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.020753 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.035958 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.065906 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:07Z\\\",\\\"message\\\":\\\"930 20:14:07.527497 6237 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 20:14:07.528917 6237 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 20:14:07.528952 6237 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 20:14:07.529012 6237 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 20:14:07.529083 6237 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 20:14:07.529124 6237 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:07.529131 6237 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:07.529205 6237 factory.go:656] Stopping watch factory\\\\nI0930 20:14:07.529269 6237 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:07.529312 6237 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 20:14:07.529330 6237 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:07.529341 6237 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:07.529352 6237 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:07.529364 6237 handler.go:208] Removed *v1.Node event handler 
2\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0930 20:14:08.997653 6357 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.081205 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-
crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.099370 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.102854 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.102912 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.102929 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.102949 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.102964 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.114459 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.129063 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.149720 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce8
1742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.167084 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.179534 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.199258 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\"
:0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.206102 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.206149 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.206166 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.206192 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.206235 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.212635 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.309488 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.309539 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.309559 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.309585 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.309657 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.317726 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4"] Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.318504 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.322315 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.322794 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.346574 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.368867 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.384034 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.397913 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.399305 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4930c0a7-d9e1-447d-945c-7d44124b6340-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.399402 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4930c0a7-d9e1-447d-945c-7d44124b6340-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.399625 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4930c0a7-d9e1-447d-945c-7d44124b6340-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.399724 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxrx5\" (UniqueName: \"kubernetes.io/projected/4930c0a7-d9e1-447d-945c-7d44124b6340-kube-api-access-rxrx5\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.412569 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.412629 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.412648 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.412680 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.412700 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.416978 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.439393 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.472546 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f
777bf0bffe4ff6b833e3bc7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:07Z\\\",\\\"message\\\":\\\"930 20:14:07.527497 6237 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 20:14:07.528917 6237 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 20:14:07.528952 6237 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 20:14:07.529012 6237 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 20:14:07.529083 6237 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 20:14:07.529124 6237 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:07.529131 6237 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:07.529205 6237 factory.go:656] Stopping watch factory\\\\nI0930 20:14:07.529269 6237 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:07.529312 6237 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 20:14:07.529330 6237 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:07.529341 6237 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:07.529352 6237 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:07.529364 6237 handler.go:208] Removed *v1.Node event handler 2\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at 
address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0930 20:14:08.997653 6357 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri
-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.488903 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.500795 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4930c0a7-d9e1-447d-945c-7d44124b6340-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.500872 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4930c0a7-d9e1-447d-945c-7d44124b6340-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.500926 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4930c0a7-d9e1-447d-945c-7d44124b6340-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.500961 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxrx5\" (UniqueName: \"kubernetes.io/projected/4930c0a7-d9e1-447d-945c-7d44124b6340-kube-api-access-rxrx5\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.501710 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4930c0a7-d9e1-447d-945c-7d44124b6340-env-overrides\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.501831 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4930c0a7-d9e1-447d-945c-7d44124b6340-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.504843 4919 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.508530 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4930c0a7-d9e1-447d-945c-7d44124b6340-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.516635 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.516692 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.516705 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.516732 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.516745 4919 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.522082 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxrx5\" (UniqueName: \"kubernetes.io/projected/4930c0a7-d9e1-447d-945c-7d44124b6340-kube-api-access-rxrx5\") pod \"ovnkube-control-plane-749d76644c-tvtj4\" (UID: \"4930c0a7-d9e1-447d-945c-7d44124b6340\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.522402 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.539245 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.556870 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.573033 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.591915 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.605833 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.619173 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.619242 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.619255 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.619273 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.619285 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.630499 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.631143 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.631176 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:10 crc kubenswrapper[4919]: E0930 20:14:10.631333 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:10 crc kubenswrapper[4919]: E0930 20:14:10.631452 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.631676 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:10 crc kubenswrapper[4919]: E0930 20:14:10.631762 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.632839 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"
startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829
e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:10Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.722950 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.723010 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.723020 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.723040 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.723053 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.825368 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.825611 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.825623 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.825641 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.825652 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.928822 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.928885 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.928899 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.928925 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.928941 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:10Z","lastTransitionTime":"2025-09-30T20:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.963485 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/1.log" Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.973440 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" event={"ID":"4930c0a7-d9e1-447d-945c-7d44124b6340","Type":"ContainerStarted","Data":"04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84"} Sep 30 20:14:10 crc kubenswrapper[4919]: I0930 20:14:10.973510 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" event={"ID":"4930c0a7-d9e1-447d-945c-7d44124b6340","Type":"ContainerStarted","Data":"d7d140b584716fb895a429173299f0000e93a53b16f332d6193b92d678515ff0"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.006411 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.006700 4919 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.006831 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:27.006795543 +0000 UTC m=+52.122828710 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.036113 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.036174 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.036190 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.036233 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.036247 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.107961 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.108095 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.108148 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108245 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:14:27.108198288 +0000 UTC m=+52.224231415 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108304 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.108314 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108325 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108411 4919 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108490 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:27.108466196 +0000 UTC m=+52.224499333 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108315 4919 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108365 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108649 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108668 4919 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108669 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:27.108617081 +0000 UTC m=+52.224650368 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.108747 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:27.108725414 +0000 UTC m=+52.224758531 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.139572 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.139626 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.139644 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.139668 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.139686 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.242763 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.242802 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.242812 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.242829 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.242842 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.345715 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.346255 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.346264 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.346281 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.346292 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.449540 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.449572 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.449582 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.449597 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.449608 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.552497 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.552548 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.552559 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.552579 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.552599 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.655876 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.655926 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.655939 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.655957 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.655972 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.759520 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.759574 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.759584 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.759603 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.759615 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.817487 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-bwpdf"] Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.818097 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.818175 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.840502 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995
a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.855609 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.862729 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.862770 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.862780 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.862796 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.862809 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.872603 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.889696 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.902911 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.917053 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pp9g2\" (UniqueName: \"kubernetes.io/projected/c0624d31-70fc-4d66-a31b-4e67896ab40e-kube-api-access-pp9g2\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.917152 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.917282 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\
",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.938770 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f
777bf0bffe4ff6b833e3bc7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:07Z\\\",\\\"message\\\":\\\"930 20:14:07.527497 6237 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 20:14:07.528917 6237 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 20:14:07.528952 6237 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 20:14:07.529012 6237 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 20:14:07.529083 6237 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 20:14:07.529124 6237 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:07.529131 6237 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:07.529205 6237 factory.go:656] Stopping watch factory\\\\nI0930 20:14:07.529269 6237 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:07.529312 6237 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 20:14:07.529330 6237 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:07.529341 6237 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:07.529352 6237 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:07.529364 6237 handler.go:208] Removed *v1.Node event handler 2\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at 
address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0930 20:14:08.997653 6357 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri
-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.954963 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.965814 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.965885 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.965900 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.965925 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.965941 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.967683 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.967713 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.967721 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.967733 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.967742 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.978798 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.979015 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" event={"ID":"4930c0a7-d9e1-447d-945c-7d44124b6340","Type":"ContainerStarted","Data":"609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2"} Sep 30 20:14:11 crc kubenswrapper[4919]: E0930 20:14:11.982891 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.990053 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.990133 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.990156 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.990184 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.990204 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:11Z","lastTransitionTime":"2025-09-30T20:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:11 crc kubenswrapper[4919]: I0930 20:14:11.998011 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:11Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.004686 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.007943 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.007979 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.007990 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.008011 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.008022 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.011003 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.018033 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pp9g2\" (UniqueName: \"kubernetes.io/projected/c0624d31-70fc-4d66-a31b-4e67896ab40e-kube-api-access-pp9g2\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.018162 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.018315 4919 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object 
"openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.018376 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs podName:c0624d31-70fc-4d66-a31b-4e67896ab40e nodeName:}" failed. No retries permitted until 2025-09-30 20:14:12.518359938 +0000 UTC m=+37.634393065 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs") pod "network-metrics-daemon-bwpdf" (UID: "c0624d31-70fc-4d66-a31b-4e67896ab40e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.022535 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.023387 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.026315 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.026361 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.026373 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.026392 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.026665 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.032928 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.039122 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pp9g2\" (UniqueName: \"kubernetes.io/projected/c0624d31-70fc-4d66-a31b-4e67896ab40e-kube-api-access-pp9g2\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.045351 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.045647 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.049816 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.049841 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.049850 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.049866 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.049878 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.059955 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab4
8fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.062399 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.062504 4919 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.068144 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.068168 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.068176 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.068194 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.068206 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.072287 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.081643 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.093478 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.104010 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.115908 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.130715 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.149381 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:07Z\\\",\\\"message\\\":\\\"930 20:14:07.527497 6237 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 20:14:07.528917 6237 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 20:14:07.528952 6237 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 20:14:07.529012 6237 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 20:14:07.529083 6237 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 20:14:07.529124 6237 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:07.529131 6237 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:07.529205 6237 factory.go:656] Stopping watch factory\\\\nI0930 20:14:07.529269 6237 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:07.529312 6237 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 20:14:07.529330 6237 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:07.529341 6237 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:07.529352 6237 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:07.529364 6237 handler.go:208] Removed *v1.Node event handler 
2\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0930 20:14:08.997653 6357 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.165801 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-
crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.170767 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.170968 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.171120 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.171314 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.171467 4919 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.187158 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.201602 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.216772 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.229461 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.254314 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.274637 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.275011 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.275173 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.275368 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.280789 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.301980 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.321062 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.334803 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.361508 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.379470 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.384263 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.384306 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.384322 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.384346 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.384373 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.391801 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:12Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.487775 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.487819 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.487829 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.487849 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.487864 4919 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.522840 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.523087 4919 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.523237 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs podName:c0624d31-70fc-4d66-a31b-4e67896ab40e nodeName:}" failed. No retries permitted until 2025-09-30 20:14:13.523192553 +0000 UTC m=+38.639225680 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs") pod "network-metrics-daemon-bwpdf" (UID: "c0624d31-70fc-4d66-a31b-4e67896ab40e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.591500 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.591560 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.591569 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.591588 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.591601 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.632140 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.632195 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.632341 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.632208 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.632518 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:12 crc kubenswrapper[4919]: E0930 20:14:12.632585 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.694804 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.694856 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.694869 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.694890 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.694904 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.797658 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.797720 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.797735 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.797755 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.797768 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.900447 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.900488 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.900500 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.900518 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:12 crc kubenswrapper[4919]: I0930 20:14:12.900529 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:12Z","lastTransitionTime":"2025-09-30T20:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.002852 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.002895 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.002903 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.002920 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.002931 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.105483 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.105520 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.105529 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.105545 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.105557 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.208789 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.208846 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.208855 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.208875 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.208887 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.312396 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.312455 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.312467 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.312489 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.312505 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.416078 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.416150 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.416176 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.416208 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.416266 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.519518 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.519579 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.519589 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.519624 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.519637 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.535336 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:13 crc kubenswrapper[4919]: E0930 20:14:13.535515 4919 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:13 crc kubenswrapper[4919]: E0930 20:14:13.535604 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs podName:c0624d31-70fc-4d66-a31b-4e67896ab40e nodeName:}" failed. No retries permitted until 2025-09-30 20:14:15.535579211 +0000 UTC m=+40.651612378 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs") pod "network-metrics-daemon-bwpdf" (UID: "c0624d31-70fc-4d66-a31b-4e67896ab40e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.622865 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.622925 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.622942 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.622968 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.622990 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.631347 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:13 crc kubenswrapper[4919]: E0930 20:14:13.631771 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.726392 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.726461 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.726498 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.726528 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.726556 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.829601 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.829658 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.829677 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.829703 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.829721 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.932685 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.932747 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.932759 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.932780 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:13 crc kubenswrapper[4919]: I0930 20:14:13.932792 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:13Z","lastTransitionTime":"2025-09-30T20:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.036404 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.036522 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.036542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.036568 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.036586 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.140245 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.140318 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.140342 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.140372 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.140396 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.243344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.243417 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.243439 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.243467 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.243487 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.346048 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.346105 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.346120 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.346140 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.346154 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.449069 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.449120 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.449131 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.449152 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.449166 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.552902 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.552946 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.552958 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.552975 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.552985 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.632107 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.632135 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.632280 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:14 crc kubenswrapper[4919]: E0930 20:14:14.632447 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:14 crc kubenswrapper[4919]: E0930 20:14:14.632589 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:14 crc kubenswrapper[4919]: E0930 20:14:14.632659 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.656159 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.656283 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.656301 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.656329 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.656347 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.759118 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.759161 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.759171 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.759191 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.759204 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.863277 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.863360 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.863379 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.863406 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.863423 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.966579 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.966662 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.966680 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.966707 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:14 crc kubenswrapper[4919]: I0930 20:14:14.966724 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:14Z","lastTransitionTime":"2025-09-30T20:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.070505 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.070569 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.070582 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.070605 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.070623 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.174329 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.174421 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.174445 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.174482 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.174509 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.277299 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.277336 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.277345 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.277362 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.277372 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.380755 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.380817 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.380829 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.380848 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.380863 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.484485 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.484558 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.484577 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.484604 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.484626 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.558603 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:15 crc kubenswrapper[4919]: E0930 20:14:15.558754 4919 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:15 crc kubenswrapper[4919]: E0930 20:14:15.558833 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs podName:c0624d31-70fc-4d66-a31b-4e67896ab40e nodeName:}" failed. No retries permitted until 2025-09-30 20:14:19.558814554 +0000 UTC m=+44.674847681 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs") pod "network-metrics-daemon-bwpdf" (UID: "c0624d31-70fc-4d66-a31b-4e67896ab40e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.587926 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.587982 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.587995 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.588016 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.588031 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.631494 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:15 crc kubenswrapper[4919]: E0930 20:14:15.631706 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.654832 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995
a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.670810 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.683800 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.691565 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.691639 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.691679 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.691711 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.691733 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.696997 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.717996 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.741519 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f
777bf0bffe4ff6b833e3bc7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6261a1cfa8d1056ba654d922fc494a6dfca082fdecf321a015721046b9f41d07\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:07Z\\\",\\\"message\\\":\\\"930 20:14:07.527497 6237 reflector.go:311] Stopping reflector *v1.ClusterUserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0930 20:14:07.528917 6237 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0930 20:14:07.528952 6237 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0930 20:14:07.529012 6237 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0930 20:14:07.529083 6237 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0930 20:14:07.529124 6237 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:07.529131 6237 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:07.529205 6237 factory.go:656] Stopping watch factory\\\\nI0930 20:14:07.529269 6237 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:07.529312 6237 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0930 20:14:07.529330 6237 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:07.529341 6237 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:07.529352 6237 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:07.529364 6237 handler.go:208] Removed *v1.Node event handler 2\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at 
address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0930 20:14:08.997653 6357 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri
-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.759136 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.775181 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.790565 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.794320 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.794374 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.794384 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.794405 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.794418 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.806236 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.824594 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.842683 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.856577 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.871463 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.889377 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce8
1742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.897428 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.897476 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.897494 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.897519 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.897536 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:15Z","lastTransitionTime":"2025-09-30T20:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.905282 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:15 crc kubenswrapper[4919]: I0930 20:14:15.916971 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:15Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:16 crc 
kubenswrapper[4919]: I0930 20:14:16.002320 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.002385 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.002406 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.002433 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.002451 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.105857 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.105906 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.105920 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.105943 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.105958 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.209623 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.209703 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.209729 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.209761 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.209786 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.313428 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.313491 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.313509 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.313533 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.313552 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.417122 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.417206 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.417261 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.417291 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.417312 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.520191 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.520304 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.520328 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.520361 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.520385 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.623806 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.623866 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.623882 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.623906 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.623922 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.631684 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.631710 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:16 crc kubenswrapper[4919]: E0930 20:14:16.631842 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.631710 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:16 crc kubenswrapper[4919]: E0930 20:14:16.632108 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:16 crc kubenswrapper[4919]: E0930 20:14:16.632288 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.730418 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.730478 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.730498 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.730523 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.730541 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.834552 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.834610 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.834629 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.834652 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.834670 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.937308 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.937344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.937353 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.937367 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:16 crc kubenswrapper[4919]: I0930 20:14:16.937376 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:16Z","lastTransitionTime":"2025-09-30T20:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.040344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.040403 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.040420 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.040446 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.040463 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.144006 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.144067 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.144084 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.144108 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.144126 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.246923 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.246970 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.246985 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.247007 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.247020 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.349881 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.349944 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.349962 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.349985 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.350003 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.453337 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.453406 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.453423 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.453451 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.453468 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.556076 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.556137 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.556155 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.556181 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.556200 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.631879 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:17 crc kubenswrapper[4919]: E0930 20:14:17.632071 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.659353 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.659414 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.659431 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.659458 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.659482 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.763054 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.763109 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.763127 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.763152 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.763171 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.866524 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.866585 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.866602 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.866627 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.866645 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.970530 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.970594 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.970615 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.970641 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:17 crc kubenswrapper[4919]: I0930 20:14:17.970657 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:17Z","lastTransitionTime":"2025-09-30T20:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.073468 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.073521 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.073536 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.073555 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.073568 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.176132 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.176180 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.176190 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.176208 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.176235 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.279664 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.279709 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.279720 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.279740 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.279751 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.382592 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.382645 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.382653 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.382671 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.382684 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.485394 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.485454 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.485471 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.485496 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.485513 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.587911 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.587948 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.587956 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.587973 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.587982 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.631557 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.631578 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.631779 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:18 crc kubenswrapper[4919]: E0930 20:14:18.632069 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:18 crc kubenswrapper[4919]: E0930 20:14:18.632248 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:18 crc kubenswrapper[4919]: E0930 20:14:18.632424 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.691595 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.691660 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.691676 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.691702 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.691721 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.795381 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.795444 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.795462 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.795488 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.795507 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.898805 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.898877 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.898891 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.898910 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:18 crc kubenswrapper[4919]: I0930 20:14:18.898923 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:18Z","lastTransitionTime":"2025-09-30T20:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.001529 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.001581 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.001592 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.001613 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.001627 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.104521 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.104586 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.104613 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.104639 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.104658 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.206740 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.206795 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.206814 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.206840 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.206860 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.310045 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.310123 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.310146 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.310176 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.310202 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.413377 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.413429 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.413445 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.413470 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.413487 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.516944 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.516995 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.517011 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.517037 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.517055 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.617051 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:19 crc kubenswrapper[4919]: E0930 20:14:19.617396 4919 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:19 crc kubenswrapper[4919]: E0930 20:14:19.617574 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs podName:c0624d31-70fc-4d66-a31b-4e67896ab40e nodeName:}" failed. No retries permitted until 2025-09-30 20:14:27.617528554 +0000 UTC m=+52.733561711 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs") pod "network-metrics-daemon-bwpdf" (UID: "c0624d31-70fc-4d66-a31b-4e67896ab40e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.620446 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.620491 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.620502 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.620527 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.620538 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.631601 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:19 crc kubenswrapper[4919]: E0930 20:14:19.631813 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.724517 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.724604 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.724615 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.724633 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.724643 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.827761 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.827826 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.827835 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.827871 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.827886 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.931594 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.931679 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.931702 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.931736 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:19 crc kubenswrapper[4919]: I0930 20:14:19.931754 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:19Z","lastTransitionTime":"2025-09-30T20:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.034643 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.034777 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.034799 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.034868 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.034885 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.138468 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.138545 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.138565 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.138599 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.138623 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.242340 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.242403 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.242414 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.242433 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.242446 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.345764 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.345883 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.345907 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.345937 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.345965 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.449153 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.449258 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.449278 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.449304 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.449321 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.552716 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.552827 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.552851 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.552881 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.552903 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.632156 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.632321 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:20 crc kubenswrapper[4919]: E0930 20:14:20.632384 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:20 crc kubenswrapper[4919]: E0930 20:14:20.632567 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.632607 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:20 crc kubenswrapper[4919]: E0930 20:14:20.632801 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.655689 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.655744 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.655761 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.655785 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.655803 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.759122 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.759198 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.759208 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.759244 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.759258 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.862025 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.862081 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.862092 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.862110 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.862123 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.965928 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.965997 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.966007 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.966031 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:20 crc kubenswrapper[4919]: I0930 20:14:20.966043 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:20Z","lastTransitionTime":"2025-09-30T20:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.091783 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.091856 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.091869 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.091891 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.091904 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.210165 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.210252 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.210265 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.210284 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.210296 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.313588 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.313637 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.313652 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.313670 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.313680 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.417122 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.417264 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.417284 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.417372 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.417391 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.520920 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.520985 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.521003 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.521080 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.521103 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.624202 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.624297 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.624315 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.624342 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.624360 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.631660 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:21 crc kubenswrapper[4919]: E0930 20:14:21.631876 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.633307 4919 scope.go:117] "RemoveContainer" containerID="a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.655856 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\
\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48
fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernete
s.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.678347 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"m
ountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.697257 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.718694 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.727119 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.727181 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.727208 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.727286 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.727314 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.743354 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.758175 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.770903 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.787977 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.806194 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.830257 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.830440 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.830508 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.830533 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.830567 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.830591 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.855470 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.881286 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.906371 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.929001 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.933889 4919 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.933983 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.934021 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.934048 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.934062 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:21Z","lastTransitionTime":"2025-09-30T20:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.953947 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:21 crc kubenswrapper[4919]: I0930 20:14:21.981996 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f
777bf0bffe4ff6b833e3bc7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0930 20:14:08.997653 6357 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.001878 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:21Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.019382 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.023766 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/1.log" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.028206 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.028928 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.039403 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.039453 4919 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.039466 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.039487 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.039502 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.047294 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.062907 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.081722 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-l
ib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.106771 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45
d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0930 20:14:08.997653 6357 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.123619 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.133808 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.133861 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.133876 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.134114 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.134136 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.141992 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.159940 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"6
24bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.165197 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.165245 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.165254 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.165270 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.165280 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.167709 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
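
The repeated patch failures in this window share one root cause: the network-node-identity webhook on 127.0.0.1:9743 is serving a certificate that expired at 2025-08-24T17:21:41Z, over a month before the "current time" in the error. A minimal Go sketch of how to confirm this from the node itself (illustrative, not part of the captured log; the address and port are taken from the error text):

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Endpoint from the kubelet error. InsecureSkipVerify is deliberate:
        // the point is to inspect the expired certificate, not to validate it.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatalf("dial: %v", err)
        }
        defer conn.Close()
        for _, cert := range conn.ConnectionState().PeerCertificates {
            fmt.Printf("subject=%v notBefore=%s notAfter=%s expired=%v\n",
                cert.Subject,
                cert.NotBefore.Format(time.RFC3339),
                cert.NotAfter.Format(time.RFC3339),
                time.Now().After(cert.NotAfter))
        }
    }

The webhook container mounts its serving certificate at /etc/webhook-cert/ (visible in the volumeMounts of the pod status below), so a renewed certificate has to land in the secret behind that mount before these Post calls can succeed.
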
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.211292 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.211350 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"6
24bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.216722 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.216765 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.216777 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.216797 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.216808 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.231944 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.235853 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... node-status payload elided; identical to the 20:14:22.211350 attempt above ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.242073 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.242107 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.242116 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.242131 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.242142 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.248357 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.257634 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [... node-status payload elided; identical to the 20:14:22.211350 attempt above ...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.261310 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
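
Independently of the webhook failures, every status sweep here reports Ready=False because the kubelet finds no CNI configuration under /etc/kubernetes/cni/net.d/. A rough Go sketch of that existence check (illustrative only; the directory is taken from the error message, and the extension list assumes the usual CNI conventions of .conf, .conflist and .json):

    package main

    import (
        "fmt"
        "log"
        "os"
        "path/filepath"
    )

    func main() {
        // Directory named in the kubelet's NetworkReady=false message.
        dir := "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(dir)
        if err != nil {
            log.Fatalf("reading %s: %v", dir, err)
        }
        found := 0
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // extensions CNI loaders accept
                fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
                found++
            }
        }
        if found == 0 {
            fmt.Println("no CNI configuration found; the network plugin has not written one yet")
        }
    }

Once the network plugin writes its configuration into that directory, the runtime reports NetworkReady=true and the NodeNotReady events stop.
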
event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.263424 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.263433 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.263453 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.263465 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.266267 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.272282 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.278771 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.278890 4919 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.280672 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.280725 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.280740 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.280762 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.280777 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.285259 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.296722 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.306718 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.332307 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.346846 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.363346 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.377768 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.383560 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.383608 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.383622 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.383642 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.383655 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.398305 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.422656 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.437815 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.451197 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.473130 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace event handler 1\\\\nF0930 20:14:08.997653 6357 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.485650 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.485702 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.485715 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.485737 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.485755 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.489463 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.505011 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.515759 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.528392 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.538908 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.552017 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.568234 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce8
1742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.579800 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b
17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.588741 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.588804 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.588817 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.588839 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.588853 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.590425 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.612018 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.627986 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.631148 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.631183 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.631198 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.631339 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.631434 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:22 crc kubenswrapper[4919]: E0930 20:14:22.631515 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.640791 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.691497 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.691543 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.691552 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.691571 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.691584 4919 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.795322 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.795410 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.795428 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.795460 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.795480 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.898040 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.898085 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.898093 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.898109 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:22 crc kubenswrapper[4919]: I0930 20:14:22.898123 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:22Z","lastTransitionTime":"2025-09-30T20:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.001706 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.001788 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.001806 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.001833 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.001855 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.035086 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/2.log" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.035869 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/1.log" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.038623 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001" exitCode=1 Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.038699 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.038770 4919 scope.go:117] "RemoveContainer" containerID="a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.041440 4919 scope.go:117] "RemoveContainer" containerID="08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001" Sep 30 20:14:23 crc kubenswrapper[4919]: E0930 20:14:23.041886 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.063378 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.085982 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.104746 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.104787 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.104795 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.104814 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.104824 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.105121 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"nam
e\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.125651 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.142783 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.159776 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.174410 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.205693 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\"
:\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servi
ceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a7b501372ed14722202eae6ef1993a4ea891d68f777bf0bffe4ff6b833e3bc7b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:09Z\\\",\\\"message\\\":\\\"997355 6357 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0930 20:14:08.997372 6357 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0930 20:14:08.997382 6357 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0930 20:14:08.997403 6357 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0930 20:14:08.997401 6357 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0930 20:14:08.997421 6357 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0930 20:14:08.997434 6357 factory.go:656] Stopping watch factory\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Node event handler 2\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0930 20:14:08.997473 6357 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0930 20:14:08.997453 6357 ovnkube.go:599] Stopped ovnkube\\\\nI0930 20:14:08.997491 6357 handler.go:208] Removed *v1.Node event handler 7\\\\nI0930 20:14:08.997450 6357 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0930 20:14:08.997542 6357 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0930 20:14:08.997554 6357 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0930 20:14:08.997456 6357 handler.go:208] Removed *v1.Namespace 
event handler 1\\\\nF0930 20:14:08.997653 6357 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster 
opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.216245 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.216510 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.216762 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.216977 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.217185 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.226729 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.245345 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.266200 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce8
1742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.285049 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b
17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.300549 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.320577 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.320612 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.320624 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.320645 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.320659 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.332257 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.352248 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.369501 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.387436 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.407128 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:23Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.423770 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.423833 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.423847 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.423870 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.423884 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.527149 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.527204 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.527270 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.527302 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.527321 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.630499 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.630593 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.630617 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.630654 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.630681 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.631293 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:23 crc kubenswrapper[4919]: E0930 20:14:23.631502 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.734546 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.734601 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.734620 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.734648 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.734671 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.838614 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.838661 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.838674 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.838695 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.838709 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.941676 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.941731 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.941745 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.941765 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:23 crc kubenswrapper[4919]: I0930 20:14:23.941777 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:23Z","lastTransitionTime":"2025-09-30T20:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.045394 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.045481 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.045502 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.045530 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.045548 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.045707 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/2.log" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.050587 4919 scope.go:117] "RemoveContainer" containerID="08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001" Sep 30 20:14:24 crc kubenswrapper[4919]: E0930 20:14:24.050860 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.068842 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.085062 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.104877 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.120338 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.136859 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.148301 4919 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.148484 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.148582 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.148654 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.148722 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.152517 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.178630 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45
d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.197306 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.211891 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.233968 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.252272 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.252362 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:24 crc 
kubenswrapper[4919]: I0930 20:14:24.252389 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.252428 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.252454 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.253056 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:1
4:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.268650 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.287122 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.320570 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07
b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.340358 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.356094 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.356636 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.356691 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.356709 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.356737 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.356755 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.374672 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.391857 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:24Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.460115 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.460168 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.460185 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.460207 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.460255 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.562682 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.562757 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.562771 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.562789 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.562800 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.632164 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.632185 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:14:24 crc kubenswrapper[4919]: E0930 20:14:24.632420 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.632184 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:24 crc kubenswrapper[4919]: E0930 20:14:24.632708 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:14:24 crc kubenswrapper[4919]: E0930 20:14:24.632789 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.666659 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.666739 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.666762 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.666800 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.666819 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.770107 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.770190 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.770210 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.770314 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.770334 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.874342 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.874405 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.874431 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.874461 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.874488 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.977476 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.977534 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.977550 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.977578 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:24 crc kubenswrapper[4919]: I0930 20:14:24.977596 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:24Z","lastTransitionTime":"2025-09-30T20:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.080252 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.080324 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.080349 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.080383 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.080404 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.183611 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.183660 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.183673 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.183697 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.183711 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.286772 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.286852 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.286863 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.286889 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.286905 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.391056 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.391136 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.391152 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.391183 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.391204 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.494481 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.494548 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.494565 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.494594 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.494612 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.598324 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.598394 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.598418 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.598446 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.598465 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.632127 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:25 crc kubenswrapper[4919]: E0930 20:14:25.632367 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.659597 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.687269 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.701924 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.701982 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.701999 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.702023 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.702043 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.707037 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.726430 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.766250 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.786843 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.802147 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.809904 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.810193 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.810208 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.814177 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.814301 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.823002 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.848696 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.890533 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.916642 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.916692 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.916703 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.916735 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.916744 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:25Z","lastTransitionTime":"2025-09-30T20:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.929182 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45
d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.947492 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.962379 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.977796 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:25 crc kubenswrapper[4919]: I0930 20:14:25.992579 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:25Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.011118 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:26Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.019686 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.019739 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.019766 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.019794 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.019813 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.027909 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:26Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.044293 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:26Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.123621 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.123675 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.123686 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.123707 4919 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.123721 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.227643 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.227716 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.227736 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.227764 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.227784 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.331063 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.331132 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.331145 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.331164 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.331177 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.434508 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.434583 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.434603 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.434628 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.434648 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.537533 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.537596 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.537609 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.537631 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.537647 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.631993 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.632107 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.632353 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:26 crc kubenswrapper[4919]: E0930 20:14:26.632472 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:26 crc kubenswrapper[4919]: E0930 20:14:26.632704 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:26 crc kubenswrapper[4919]: E0930 20:14:26.632856 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.641097 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.641152 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.641171 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.641196 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.641240 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.744778 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.744849 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.744870 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.744901 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.744920 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.848031 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.848178 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.848198 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.848258 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.848279 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.951859 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.951922 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.951940 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.951964 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:26 crc kubenswrapper[4919]: I0930 20:14:26.951982 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:26Z","lastTransitionTime":"2025-09-30T20:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.008769 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.009054 4919 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.009204 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:59.009170441 +0000 UTC m=+84.125203608 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.055641 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.055679 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.055692 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.055714 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.055726 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.109901 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.109982 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:14:59.109927398 +0000 UTC m=+84.225960565 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.110256 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.110382 4919 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.110392 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.110481 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:59.110451523 +0000 UTC m=+84.226484690 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.110541 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.110589 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.110594 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.110648 4919 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.110722 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 20:14:59.11069826 +0000 UTC m=+84.226731487 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.110947 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.111022 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.111051 4919 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.111169 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-09-30 20:14:59.111138833 +0000 UTC m=+84.227172000 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.158567 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.158654 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.158669 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.158698 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.158715 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.261653 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.261731 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.261760 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.261794 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.261817 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.366099 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.366200 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.366241 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.366273 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.366291 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.469732 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.469790 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.469807 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.469831 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.469848 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.572522 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.572618 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.572685 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.572713 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.572728 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.631814 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.631996 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.676136 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.676200 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.676225 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.676247 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.676258 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.718814 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.719074 4919 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 20:14:27 crc kubenswrapper[4919]: E0930 20:14:27.719262 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs podName:c0624d31-70fc-4d66-a31b-4e67896ab40e nodeName:}" failed. No retries permitted until 2025-09-30 20:14:43.719201166 +0000 UTC m=+68.835234513 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs") pod "network-metrics-daemon-bwpdf" (UID: "c0624d31-70fc-4d66-a31b-4e67896ab40e") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.779805 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.779878 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.779893 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.779921 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.779937 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.883740 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.883801 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.883821 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.883848 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.883865 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.986418 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.986481 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.986498 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.986520 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:27 crc kubenswrapper[4919]: I0930 20:14:27.986537 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:27Z","lastTransitionTime":"2025-09-30T20:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.089366 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.089441 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.089463 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.089497 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.089519 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.192692 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.192756 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.192775 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.192801 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.192818 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.295251 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.295288 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.295295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.295311 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.295324 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.398756 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.398819 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.398834 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.398943 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.398959 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.503542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.503607 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.503625 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.503652 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.503672 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.607531 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.607598 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.607613 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.607667 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.607681 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.631698 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.631864 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.632031 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:14:28 crc kubenswrapper[4919]: E0930 20:14:28.632457 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:14:28 crc kubenswrapper[4919]: E0930 20:14:28.632578 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:14:28 crc kubenswrapper[4919]: E0930 20:14:28.632619 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.711370 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.711462 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.711489 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.711524 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.711548 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.815086 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.815148 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.815167 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.815193 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.815247 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.918670 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.918915 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.919061 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.919203 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:28 crc kubenswrapper[4919]: I0930 20:14:28.919405 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:28Z","lastTransitionTime":"2025-09-30T20:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.023411 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.023478 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.023500 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.023528 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.023548 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.126957 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.127022 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.127042 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.127069 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.127087 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.230914 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.231002 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.231022 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.231044 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.231098 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.335306 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.335369 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.335383 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.335403 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.335417 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.438506 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.438564 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.438574 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.438598 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.438616 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.541132 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.541190 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.541203 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.541282 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.541297 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.632245 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:29 crc kubenswrapper[4919]: E0930 20:14:29.632492 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.643931 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.643995 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.644020 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.644049 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.644067 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.746916 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.746989 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.747006 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.747031 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.747054 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.850352 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.850415 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.850437 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.850466 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.850487 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.953195 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.953335 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.953356 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.953397 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:29 crc kubenswrapper[4919]: I0930 20:14:29.953419 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:29Z","lastTransitionTime":"2025-09-30T20:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.056783 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.056841 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.056857 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.056882 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.056901 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.160291 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.160361 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.160378 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.160405 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.160423 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.263336 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.263390 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.263406 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.263431 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.263449 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.366331 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.366382 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.366393 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.366410 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.366423 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.470409 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.470542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.470566 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.470591 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.470640 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.574018 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.574082 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.574104 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.574132 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.574152 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.632281 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.632336 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.632449 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:14:30 crc kubenswrapper[4919]: E0930 20:14:30.632646 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:14:30 crc kubenswrapper[4919]: E0930 20:14:30.633043 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:14:30 crc kubenswrapper[4919]: E0930 20:14:30.633276 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.677162 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.677266 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.677291 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.677323 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.677343 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.780844 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.780910 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.780932 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.780962 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.780987 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.884113 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.884168 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.884191 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.884249 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.884265 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.986586 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.986664 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.986687 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.986718 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:30 crc kubenswrapper[4919]: I0930 20:14:30.986739 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:30Z","lastTransitionTime":"2025-09-30T20:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.089897 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.089967 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.089986 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.090013 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.090037 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.193707 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.193787 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.193810 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.193842 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.193866 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.297204 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.297287 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.297298 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.297317 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.297331 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.400459 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.400542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.400584 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.400620 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.400643 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.503717 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.503785 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.503803 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.503832 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.503853 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.607780 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.607815 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.607824 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.607840 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.607850 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.631898 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:31 crc kubenswrapper[4919]: E0930 20:14:31.632195 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.710341 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.710390 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.710403 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.710421 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.710432 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.813279 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.813325 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.813336 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.813355 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.813366 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.916411 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.916644 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.916670 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.916699 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:31 crc kubenswrapper[4919]: I0930 20:14:31.916720 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:31Z","lastTransitionTime":"2025-09-30T20:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.019729 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.019813 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.019837 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.019871 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.019894 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.122540 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.122600 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.122624 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.122657 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.122677 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.225867 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.225984 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.226008 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.226037 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.226057 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.329115 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.329189 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.329246 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.329305 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.329324 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.376170 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.376271 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.376322 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.376353 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.376371 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.406488 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:32Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.413372 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.413434 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.413457 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.413489 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.413513 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.434997 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:32Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.441095 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.441153 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
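The "failed to patch status" entries show the kubelet PATCHing the node's status subresource with a strategic merge patch (hence the $setElementOrder/conditions directive in the payload). A compilable sketch of that call shape follows; the function name and the trimmed payload are assumptions for illustration, not kubelet source:

// sketch: the call shape behind "failed to patch status ... for node \"crc\"".
package nodestatus

import (
	"context"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/types"
	"k8s.io/client-go/kubernetes"
)

func patchReadyFalse(ctx context.Context, cs kubernetes.Interface) error {
	// The real patch in the log also carries allocatable, capacity, the image
	// list, and the $setElementOrder/conditions strategic-merge directive.
	patch := []byte(`{"status":{"conditions":[{"type":"Ready","status":"False","reason":"KubeletNotReady"}]}}`)
	_, err := cs.CoreV1().Nodes().Patch(
		ctx, "crc", // node name from the log
		types.StrategicMergePatchType, patch,
		metav1.PatchOptions{},
		"status", // status subresource, matching the kubelet's request
	)
	return err
}

Note that in this log the patch body never matters: the request is rejected by an admission webhook before the status update is applied.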
event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.441168 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.441190 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.441205 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.461179 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:32Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.466791 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.466848 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
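Every status-patch retry fails at the same place: the node.network-node-identity.openshift.io webhook on 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, well before the node's current time of 2025-09-30. A small sketch of the expiry comparison that Go's x509 verifier is reporting, assuming the certificate has been saved to a local PEM file (the file name is hypothetical):

// sketch: reproduce the "certificate has expired" comparison from the log
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Assumption: the webhook serving cert was dumped to this file, for
	// example from an `openssl s_client -connect 127.0.0.1:9743` session.
	data, err := os.ReadFile("webhook-cert.pem")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now()
	if now.After(cert.NotAfter) {
		// Matches the log's wording: "current time ... is after 2025-08-24T17:21:41Z".
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	}
}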
event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.466873 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.466902 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.466924 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.491157 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:32Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.497185 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.497253 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.497266 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.497285 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.497298 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.519413 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:32Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.519660 4919 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.522181 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.522271 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.522295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.522350 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.522368 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.625392 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.625494 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.625511 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.625567 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.625586 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.631745 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.631808 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.631908 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.632078 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.631825 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:32 crc kubenswrapper[4919]: E0930 20:14:32.632304 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.728620 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.728728 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.728752 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.728785 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.728808 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.831923 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.832002 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.832024 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.832056 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.832081 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.936073 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.936168 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.936200 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.936297 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:32 crc kubenswrapper[4919]: I0930 20:14:32.936328 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:32Z","lastTransitionTime":"2025-09-30T20:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.040467 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.040542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.040568 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.040602 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.040629 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.144267 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.144332 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.144351 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.144378 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.144397 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.248443 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.248523 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.248546 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.248577 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.248601 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.351031 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.351091 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.351116 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.351144 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.351164 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.453866 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.453956 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.453977 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.454006 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.454026 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.557726 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.557821 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.557845 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.557877 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.557900 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.632027 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:33 crc kubenswrapper[4919]: E0930 20:14:33.632334 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.660423 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.660496 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.660514 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.660546 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.660565 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.763585 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.763664 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.763684 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.763714 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.763733 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.867611 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.867703 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.867721 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.867819 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.867839 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.972331 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.972406 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.972425 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.972454 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:33 crc kubenswrapper[4919]: I0930 20:14:33.972477 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:33Z","lastTransitionTime":"2025-09-30T20:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.076431 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.076499 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.076516 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.076542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.076559 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.179905 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.179957 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.179971 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.179994 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.180010 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.283871 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.283943 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.283962 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.283994 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.284015 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.388004 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.388066 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.388086 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.388110 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.388141 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.491729 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.491785 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.491802 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.491824 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.491836 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.595519 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.595586 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.595604 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.595631 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.595650 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.631259 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.631347 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:14:34 crc kubenswrapper[4919]: E0930 20:14:34.631451 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.631362 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:34 crc kubenswrapper[4919]: E0930 20:14:34.631560 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:14:34 crc kubenswrapper[4919]: E0930 20:14:34.631713 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.698699 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.698797 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.698822 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.698873 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.698892 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.802461 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.802532 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.802548 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.802575 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.802593 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.906057 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.906107 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.906121 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.906143 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:34 crc kubenswrapper[4919]: I0930 20:14:34.906159 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:34Z","lastTransitionTime":"2025-09-30T20:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.034535 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.034599 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.034623 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.034652 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.034674 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.138083 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.138148 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.138159 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.138185 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.138201 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.242022 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.242166 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.242188 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.242280 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.242305 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.345678 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.345751 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.345771 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.345848 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.345867 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.449381 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.449451 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.449470 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.449499 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.449517 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.552636 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.552684 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.552700 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.552719 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.552734 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.631380 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:35 crc kubenswrapper[4919]: E0930 20:14:35.631572 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.632944 4919 scope.go:117] "RemoveContainer" containerID="08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001"
Sep 30 20:14:35 crc kubenswrapper[4919]: E0930 20:14:35.633183 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.655821 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.655882 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.655899 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.655924 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.655941 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.664408 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.685676 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.702308 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.721556 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.743981 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.759508 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.759591 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.759609 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.759641 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.759660 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.763144 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.780588 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.799362 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.817288 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.836414 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.856646 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.863559 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.863631 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.863655 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.863686 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.863714 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.888348 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45
d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.910934 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.928484 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.955796 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.966326 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.966397 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:35 crc 
kubenswrapper[4919]: I0930 20:14:35.966419 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.966449 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.966474 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:35Z","lastTransitionTime":"2025-09-30T20:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.975306 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:1
4:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:35 crc kubenswrapper[4919]: I0930 20:14:35.992027 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:35Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.011386 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:36Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.070241 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.070342 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.070362 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.070396 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.070423 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.174578 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.174650 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.174669 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.174698 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.174717 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.278830 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.278896 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.278920 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.278952 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.278976 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.382538 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.382606 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.382624 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.382651 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.382670 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.485958 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.486017 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.486033 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.486058 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.486076 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.589881 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.589957 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.589980 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.590015 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.590039 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.631634 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.631763 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.631763 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:36 crc kubenswrapper[4919]: E0930 20:14:36.631941 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:36 crc kubenswrapper[4919]: E0930 20:14:36.632336 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:36 crc kubenswrapper[4919]: E0930 20:14:36.632537 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.693729 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.693777 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.693796 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.693822 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.693839 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.797300 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.797364 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.797381 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.797407 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.797424 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.900977 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.901038 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.901055 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.901084 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:36 crc kubenswrapper[4919]: I0930 20:14:36.901102 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:36Z","lastTransitionTime":"2025-09-30T20:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.005095 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.005163 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.005190 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.005254 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.005273 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.108655 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.108717 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.108734 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.108759 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.108777 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.212760 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.212835 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.212854 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.212911 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.212932 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.316130 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.316199 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.316265 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.316293 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.316313 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.420006 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.420304 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.420457 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.420543 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.420610 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.524193 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.524919 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.525013 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.525108 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.525208 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.628630 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.628701 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.628719 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.628748 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.628766 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.632288 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:37 crc kubenswrapper[4919]: E0930 20:14:37.632501 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.731710 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.731773 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.731791 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.731814 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.731834 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.834527 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.834848 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.834919 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.835001 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.835091 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.937652 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.938069 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.938297 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.938516 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:37 crc kubenswrapper[4919]: I0930 20:14:37.938686 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:37Z","lastTransitionTime":"2025-09-30T20:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.041781 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.041871 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.041896 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.041928 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.041951 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.144535 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.145016 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.145164 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.145384 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.145527 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.249476 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.249886 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.250170 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.250440 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.250611 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.353940 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.354336 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.354508 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.354687 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.354815 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.461380 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.461921 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.462182 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.462457 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.462707 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.566768 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.601450 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.601534 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.601590 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.601615 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.631994 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.632056 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.632096 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:38 crc kubenswrapper[4919]: E0930 20:14:38.632201 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:38 crc kubenswrapper[4919]: E0930 20:14:38.632401 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:38 crc kubenswrapper[4919]: E0930 20:14:38.632676 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.705523 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.705605 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.705625 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.705654 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.705673 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.809063 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.809126 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.809144 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.809172 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.809191 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.912698 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.912795 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.912810 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.912834 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:38 crc kubenswrapper[4919]: I0930 20:14:38.912851 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:38Z","lastTransitionTime":"2025-09-30T20:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.016775 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.016850 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.016874 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.016909 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.016941 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.120384 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.120439 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.120455 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.120480 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.120501 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.224243 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.224300 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.224317 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.224344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.224364 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.327462 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.327528 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.327546 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.327573 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.327592 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.430377 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.430452 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.430465 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.430490 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.430507 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.534130 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.534179 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.534191 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.534209 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.534238 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.632735 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:39 crc kubenswrapper[4919]: E0930 20:14:39.632954 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.637572 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.637612 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.637625 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.637639 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.637654 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.740758 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.740808 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.740818 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.740835 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.740844 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.843763 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.843835 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.843853 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.843880 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.843899 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.947407 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.947494 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.947519 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.947555 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:39 crc kubenswrapper[4919]: I0930 20:14:39.947582 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:39Z","lastTransitionTime":"2025-09-30T20:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.050406 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.050455 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.050469 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.050495 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.050510 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.153774 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.153845 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.153869 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.153906 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.153929 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.256706 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.256745 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.256757 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.256775 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.256787 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.359719 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.359793 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.359805 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.359825 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.359838 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.462882 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.462924 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.462934 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.462948 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.462958 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.565291 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.565344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.565357 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.565377 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.565391 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.631619 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:40 crc kubenswrapper[4919]: E0930 20:14:40.631737 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.631772 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:40 crc kubenswrapper[4919]: E0930 20:14:40.631841 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.631847 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:40 crc kubenswrapper[4919]: E0930 20:14:40.632053 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.668978 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.669021 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.669035 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.669052 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.669067 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.772324 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.772372 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.772383 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.772400 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.772410 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.875204 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.875282 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.875296 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.875317 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.875331 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.978154 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.978243 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.978282 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.978301 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:40 crc kubenswrapper[4919]: I0930 20:14:40.978313 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:40Z","lastTransitionTime":"2025-09-30T20:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.080735 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.080774 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.080783 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.080797 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.080808 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.183393 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.183442 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.183454 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.183471 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.183483 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.286361 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.286417 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.286434 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.286457 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.286475 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.389004 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.389038 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.389050 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.389065 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.389076 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.492376 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.492413 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.492443 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.492463 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.492475 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.594897 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.594942 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.594956 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.594972 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.594982 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.631952 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:41 crc kubenswrapper[4919]: E0930 20:14:41.632196 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.697695 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.697758 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.697775 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.697800 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.697819 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.801104 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.801157 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.801174 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.801201 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.801259 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.904424 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.904498 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.904518 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.904548 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:41 crc kubenswrapper[4919]: I0930 20:14:41.904569 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:41Z","lastTransitionTime":"2025-09-30T20:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.007559 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.007620 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.007637 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.007666 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.007688 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.110801 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.110843 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.110853 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.110870 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.110880 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.224354 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.224402 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.224413 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.224433 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.224442 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.327483 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.327531 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.327541 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.327555 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.327566 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.429876 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.429915 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.429925 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.429942 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.429953 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.533814 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.533864 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.533881 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.533901 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.533915 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.631442 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.631575 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.631758 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.631794 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.631814 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.631921 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.637369 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.637408 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.637420 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.637434 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.637445 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.739859 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.739931 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.739942 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.739966 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.739978 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.824540 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.824596 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.824608 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.824625 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.824636 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.840736 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:42Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.844187 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.844242 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.844255 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.844275 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.844289 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.861457 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:42Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.866478 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.866554 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.866581 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.866612 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.866636 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.889398 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:42Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.894625 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.894677 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.894688 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.894705 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.894716 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.913160 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:42Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.917502 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.917569 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.917595 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.917629 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.917654 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.937635 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:42Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:42 crc kubenswrapper[4919]: E0930 20:14:42.937798 4919 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.939952 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.940007 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.940026 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.940053 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:42 crc kubenswrapper[4919]: I0930 20:14:42.940072 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:42Z","lastTransitionTime":"2025-09-30T20:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.042750 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.042819 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.042838 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.042867 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.042892 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.145968 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.146020 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.146035 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.146055 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.146070 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.248811 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.248867 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.248876 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.248897 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.248910 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.351671 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.351760 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.351771 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.351793 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.351813 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.453711 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.453742 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.453751 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.453764 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.453774 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.556399 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.556462 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.556478 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.556498 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.556516 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.632625 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:43 crc kubenswrapper[4919]: E0930 20:14:43.632944 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.659867 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.659952 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.659972 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.659997 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.660013 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.722112 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:43 crc kubenswrapper[4919]: E0930 20:14:43.722363 4919 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:43 crc kubenswrapper[4919]: E0930 20:14:43.722436 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs podName:c0624d31-70fc-4d66-a31b-4e67896ab40e nodeName:}" failed. No retries permitted until 2025-09-30 20:15:15.722413272 +0000 UTC m=+100.838446439 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs") pod "network-metrics-daemon-bwpdf" (UID: "c0624d31-70fc-4d66-a31b-4e67896ab40e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.762875 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.762937 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.762949 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.762968 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.762979 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.865204 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.865272 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.865285 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.865305 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.865317 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.967395 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.967624 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.967778 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.967857 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:43 crc kubenswrapper[4919]: I0930 20:14:43.967920 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:43Z","lastTransitionTime":"2025-09-30T20:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.071199 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.071527 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.071620 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.071726 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.071814 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.173855 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.173947 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.173962 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.173987 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.174000 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.276059 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.276095 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.276104 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.276119 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.276129 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.378634 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.378695 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.378715 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.378738 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.378757 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.481972 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.482040 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.482055 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.482084 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.482102 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.585390 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.585450 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.585468 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.585491 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.585508 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.631299 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.631353 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:44 crc kubenswrapper[4919]: E0930 20:14:44.631514 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:44 crc kubenswrapper[4919]: E0930 20:14:44.631677 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.631856 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:44 crc kubenswrapper[4919]: E0930 20:14:44.632021 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.689180 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.689264 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.689281 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.689307 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.689324 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.791729 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.791791 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.791808 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.791835 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.791853 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.895551 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.895590 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.895599 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.895617 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.895635 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.999452 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.999515 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.999530 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.999557 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:44 crc kubenswrapper[4919]: I0930 20:14:44.999570 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:44Z","lastTransitionTime":"2025-09-30T20:14:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.101901 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.101948 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.101958 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.101975 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.101986 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.130762 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/0.log" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.130817 4919 generic.go:334] "Generic (PLEG): container finished" podID="e3e33a72-0a49-4944-a2c2-ac16183942cf" containerID="503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a" exitCode=1 Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.130849 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c5crr" event={"ID":"e3e33a72-0a49-4944-a2c2-ac16183942cf","Type":"ContainerDied","Data":"503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.131238 4919 scope.go:117] "RemoveContainer" containerID="503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.157042 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.175430 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.190854 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.204462 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.204763 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.204858 4919 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.204967 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.205078 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.204952 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.216518 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.233775 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.248785 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"2025-09-30T20:13:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755\\\\n2025-09-30T20:13:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755 to /host/opt/cni/bin/\\\\n2025-09-30T20:14:00Z [verbose] multus-daemon started\\\\n2025-09-30T20:14:00Z [verbose] Readiness Indicator file check\\\\n2025-09-30T20:14:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.268187 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.278846 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.290576 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-
manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.306506 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.307634 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.307661 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc 
kubenswrapper[4919]: I0930 20:14:45.307670 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.307685 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.307695 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.317999 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:1
4:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.329531 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.348900 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.360991 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.372970 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.388786 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.399883 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.409619 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.409649 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.409660 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.409677 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.409688 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.512786 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.513061 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.513133 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.513230 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.513308 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.615771 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.615801 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.615809 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.615822 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.615830 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.632311 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:45 crc kubenswrapper[4919]: E0930 20:14:45.632434 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.651442 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.662488 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\
\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.678926 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.690547 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.705112 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.717369 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.717421 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.717431 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.717454 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.717468 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.724344 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.741744 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.755949 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"2025-09-30T20:13:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755\\\\n2025-09-30T20:13:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755 to /host/opt/cni/bin/\\\\n2025-09-30T20:14:00Z [verbose] multus-daemon started\\\\n2025-09-30T20:14:00Z [verbose] Readiness Indicator file check\\\\n2025-09-30T20:14:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.780973 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.798265 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.812393 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.819248 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.819312 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.819339 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.819442 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.819477 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.833243 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"starte
dAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.849977 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.861579 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc 
kubenswrapper[4919]: I0930 20:14:45.876661 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.911930 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name
\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d
15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.922293 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.922336 4919 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.922347 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.922369 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.922381 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:45Z","lastTransitionTime":"2025-09-30T20:14:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.926624 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:45 crc kubenswrapper[4919]: I0930 20:14:45.938188 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-30T20:14:45Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.024761 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.024829 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.024842 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.024868 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.024884 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.127678 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.127751 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.127762 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.127781 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.127796 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.136579 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/0.log" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.136830 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c5crr" event={"ID":"e3e33a72-0a49-4944-a2c2-ac16183942cf","Type":"ContainerStarted","Data":"aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb"} Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.159083 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.173791 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.184577 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.199898 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.218540 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"2025-09-30T20:13:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755\\\\n2025-09-30T20:13:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755 to /host/opt/cni/bin/\\\\n2025-09-30T20:14:00Z [verbose] multus-daemon started\\\\n2025-09-30T20:14:00Z [verbose] Readiness Indicator file check\\\\n2025-09-30T20:14:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.230621 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.230673 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.230690 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.230710 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.230727 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.236271 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster 
opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.253397 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.271122 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.285124 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.298298 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.309008 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.322736 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.333262 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.333286 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.333295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.333311 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.333321 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.343830 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.362140 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.375947 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.399875 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.413556 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.435682 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:46Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.435843 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.435859 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.435868 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.435885 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.435897 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.539629 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.539685 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.539697 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.539722 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.539735 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.631977 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.632022 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:46 crc kubenswrapper[4919]: E0930 20:14:46.632263 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:46 crc kubenswrapper[4919]: E0930 20:14:46.632644 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.633082 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:46 crc kubenswrapper[4919]: E0930 20:14:46.633299 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.642360 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.642422 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.642442 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.642466 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.642487 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.744572 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.744636 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.744661 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.744691 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.744713 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.848037 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.848102 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.848125 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.848155 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.848183 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.951524 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.951566 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.951580 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.951597 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:46 crc kubenswrapper[4919]: I0930 20:14:46.951611 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:46Z","lastTransitionTime":"2025-09-30T20:14:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.054879 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.054929 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.054945 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.054975 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.054996 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.158632 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.158701 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.158725 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.158757 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.158781 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.261468 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.261517 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.261530 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.261551 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.261568 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.363982 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.364043 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.364078 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.364121 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.364145 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.467544 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.467618 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.467641 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.467671 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.467693 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.570715 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.570775 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.570793 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.570840 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.570857 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.632304 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:47 crc kubenswrapper[4919]: E0930 20:14:47.632491 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.632891 4919 scope.go:117] "RemoveContainer" containerID="08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.674197 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.674274 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.674295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.674318 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.674336 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.776657 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.776704 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.776714 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.776733 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.776745 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.878461 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.878516 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.878525 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.878542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.878552 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.981032 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.981076 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.981091 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.981114 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:47 crc kubenswrapper[4919]: I0930 20:14:47.981131 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:47Z","lastTransitionTime":"2025-09-30T20:14:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.083181 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.083240 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.083253 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.083268 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.083281 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.146252 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/2.log" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.149929 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.150464 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.163005 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.174339 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.186262 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.186305 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.186319 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.186337 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.186350 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.191608 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.205843 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.217039 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.230535 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.275554 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.289555 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.289624 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.289639 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.289661 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.289975 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.296984 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.314856 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.331818 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.344883 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.355630 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.370334 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 
20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.384925 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"2025-09-30T20:13:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755\\\\n2025-09-30T20:13:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755 to /host/opt/cni/bin/\\\\n2025-09-30T20:14:00Z [verbose] multus-daemon started\\\\n2025-09-30T20:14:00Z [verbose] Readiness Indicator file check\\\\n2025-09-30T20:14:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.391761 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.391787 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.391795 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.391808 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.391817 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.402465 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster 
opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.418769 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.442092 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.451974 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:48Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.494112 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.494155 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.494165 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.494186 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.494198 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.596669 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.596700 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.596708 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.596722 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.596734 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.631273 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.631389 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:48 crc kubenswrapper[4919]: E0930 20:14:48.631596 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.631647 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:48 crc kubenswrapper[4919]: E0930 20:14:48.631816 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:48 crc kubenswrapper[4919]: E0930 20:14:48.631867 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.642832 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.698933 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.698965 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.698974 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.698988 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.698998 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.801959 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.802022 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.802042 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.802061 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.802073 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.904683 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.904747 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.904772 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.904804 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:48 crc kubenswrapper[4919]: I0930 20:14:48.904828 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:48Z","lastTransitionTime":"2025-09-30T20:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.007286 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.007344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.007364 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.007390 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.007408 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.110919 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.110986 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.111003 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.111028 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.111045 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.157135 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/3.log" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.158200 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/2.log" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.162633 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce" exitCode=1 Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.162786 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.162863 4919 scope.go:117] "RemoveContainer" containerID="08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.164024 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce" Sep 30 20:14:49 crc kubenswrapper[4919]: E0930 20:14:49.164396 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.185402 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.204467 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce8
1742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.213202 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.213292 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.213320 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.213349 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.213372 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.223462 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.240846 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc 
kubenswrapper[4919]: I0930 20:14:49.257477 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ca7cbd8-53cd-42e1-84e7-5574fe80d26c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2e3a2e1c1876f84e897ccb3b0cca07a8802fb2afc2de8768c8033ec904b900a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.279998 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb
5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.299932 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.316295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.316355 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.316374 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.316401 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.316421 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.318364 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.338950 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.358766 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.377073 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":tru
e,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.394914 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"2025-09-30T20:13:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755\\\\n2025-09-30T20:13:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755 to /host/opt/cni/bin/\\\\n2025-09-30T20:14:00Z [verbose] multus-daemon started\\\\n2025-09-30T20:14:00Z [verbose] Readiness Indicator file check\\\\n2025-09-30T20:14:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.418974 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.419050 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.419074 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.419106 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.419132 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.424024 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08665e42072fac0eda3caa8a7ecfa6335be2cd45d07d619404026ed85d4b9001\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"message\\\":\\\"ase_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-target-xd92c] creating logical port openshift-network-diagnostics_network-check-target-xd92c for pod on switch crc\\\\nF0930 20:14:22.642533 6569 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:22Z is after 2025-08-24T17:21:41Z]\\\\nI0930 20:14:22.642532 6569 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-controllers]} name:Service_openshift-machine-api/machine-api-controllers_TCP_cluster 
opt\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:48Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 20:14:48.477059 6920 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.444684 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-c
luster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.463169 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.478884 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.495963 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.512389 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.522689 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.522722 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.522731 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.522745 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.522755 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.525279 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:49Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.625641 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.625702 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.625718 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.625742 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.625759 4919 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.632054 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:49 crc kubenswrapper[4919]: E0930 20:14:49.632245 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.728542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.728602 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.728625 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.728653 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.728674 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.831965 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.832025 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.832066 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.832098 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.832124 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.935150 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.935204 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.935261 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.935293 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:49 crc kubenswrapper[4919]: I0930 20:14:49.935316 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:49Z","lastTransitionTime":"2025-09-30T20:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.039012 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.039063 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.039076 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.039095 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.039108 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.141935 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.142006 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.142024 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.142089 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.142109 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.169095 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/3.log" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.175332 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce" Sep 30 20:14:50 crc kubenswrapper[4919]: E0930 20:14:50.175681 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.193277 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kuberne
tes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.214509 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.237747 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.247253 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.247319 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.247344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.247377 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.247400 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.257591 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.276491 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.299299 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"2025-09-30T20:13:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755\\\\n2025-09-30T20:13:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755 to /host/opt/cni/bin/\\\\n2025-09-30T20:14:00Z [verbose] multus-daemon started\\\\n2025-09-30T20:14:00Z [verbose] Readiness Indicator file check\\\\n2025-09-30T20:14:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.331776 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:48Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 20:14:48.477059 6920 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.347536 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.350745 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.350831 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.350850 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.350875 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.350931 4919 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.369629 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.388127 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.400902 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.414062 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.428535 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.446869 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce8
1742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.453449 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.453486 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.453498 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.453517 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.453530 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.460817 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.473478 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.486398 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ca7cbd8-53cd-42e1-84e7-5574fe80d26c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2e3a2e1c1876f84e897ccb3b0cca07a8802fb2afc2de8768c8033ec904b900a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.520368 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.537998 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:50Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.556076 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.556136 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.556150 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.556166 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.556178 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.631160 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.631163 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:14:50 crc kubenswrapper[4919]: E0930 20:14:50.631307 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.631409 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:50 crc kubenswrapper[4919]: E0930 20:14:50.631594 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:14:50 crc kubenswrapper[4919]: E0930 20:14:50.631671 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.659829 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.659877 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.659888 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.659910 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.659925 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.762886 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.762950 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.762967 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.762991 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.763009 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
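Every "Failed to update status for pod" entry above fails for the same reason: the pod.network-node-identity.openshift.io admission webhook at 127.0.0.1:9743 serves a TLS certificate whose notAfter (2025-08-24T17:21:41Z) is more than a month before the node clock (2025-09-30T20:14:50Z), so each status patch is rejected before it reaches the API server. A minimal Go sketch for confirming the expiry directly against that endpoint; the address comes from the errors above, and InsecureSkipVerify is an assumption made only so the handshake completes far enough to read the untrusted certificate:

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Webhook endpoint taken from the kubelet errors above.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            InsecureSkipVerify: true, // only to read the cert; it no longer verifies
        })
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()
        for _, cert := range conn.ConnectionState().PeerCertificates {
            fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%t\n",
                cert.Subject.CommonName,
                cert.NotBefore.Format(time.RFC3339),
                cert.NotAfter.Format(time.RFC3339),
                time.Now().After(cert.NotAfter))
        }
    }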
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.866276 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.866346 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.866359 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.866383 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.866400 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.969492 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.969537 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.969550 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.969567 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:50 crc kubenswrapper[4919]: I0930 20:14:50.969578 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:50Z","lastTransitionTime":"2025-09-30T20:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.072254 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.072578 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.072739 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.072884 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.073006 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
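The NodeNotReady heartbeats repeat because the container runtime keeps answering NetworkReady=false until a CNI configuration file shows up in /etc/kubernetes/cni/net.d/; on this cluster that file is written by the network pods themselves, which is why the condition persists while their own status updates are being rejected. A rough stand-in for the readiness test, assuming the directory from the message and the usual .conf/.conflist/.json extensions (the real check lives in CRI-O/libcni and also validates file contents):

    package main

    import (
        "fmt"
        "path/filepath"
    )

    // cniConfigPresent reports whether at least one CNI config file exists,
    // loosely mirroring the condition the kubelet keeps logging above.
    func cniConfigPresent(dir string) (bool, error) {
        for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
            matches, err := filepath.Glob(filepath.Join(dir, pattern))
            if err != nil {
                return false, err
            }
            if len(matches) > 0 {
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
        if err != nil {
            fmt.Println("error:", err)
            return
        }
        fmt.Printf("NetworkReady=%t\n", ok)
    }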
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.176762 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.177263 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.177493 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.177712 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.177898 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.281340 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.281402 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.281424 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.281454 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.281476 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.384631 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.384694 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.384712 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.384740 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.384758 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
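The patch payloads in the "Failed to update status" entries are hard to read because they are quoted twice: once as the err="..." field of the log line and once more as the embedded JSON string, which is why every quote surfaces as \\\". After stripping the outer err quoting, a single strconv.Unquote pass recovers parseable JSON. A small Go sketch with a shortened, hypothetical payload (only the uid is taken from the network-metrics-daemon entries nearby; the rest is trimmed for illustration):

    package main

    import (
        "encoding/json"
        "fmt"
        "log"
        "strconv"
    )

    func main() {
        // One escaping layer, as the patch appears inside an unquoted err field.
        raw := `"{\"metadata\":{\"uid\":\"c0624d31-70fc-4d66-a31b-4e67896ab40e\"},\"status\":{\"phase\":\"Running\"}}"`
        unquoted, err := strconv.Unquote(raw)
        if err != nil {
            log.Fatal(err)
        }
        var patch map[string]any
        if err := json.Unmarshal([]byte(unquoted), &patch); err != nil {
            log.Fatal(err)
        }
        meta := patch["metadata"].(map[string]any)
        fmt.Println("patch targets uid:", meta["uid"])
    }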
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.487022 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.487085 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.487104 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.487130 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.487147 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.590194 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.590359 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.590379 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.590405 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.590423 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.632395 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:51 crc kubenswrapper[4919]: E0930 20:14:51.632655 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.693432 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.693540 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.693564 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.693593 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.693619 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.796771 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.796809 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.796819 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.796833 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.796843 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.900397 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.900487 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.900508 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.900542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:51 crc kubenswrapper[4919]: I0930 20:14:51.900565 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:51Z","lastTransitionTime":"2025-09-30T20:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.003547 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.003613 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.003630 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.003655 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.003673 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.106171 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.106302 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.106322 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.106347 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.106365 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.209640 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.209716 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.209744 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.209775 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.209798 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.313117 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.313173 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.313193 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.313242 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.313260 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.416071 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.416140 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.416157 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.416186 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.416204 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.518910 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.519002 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.519092 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.519121 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.519141 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.622581 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.622675 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.622695 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.622725 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.622755 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.632139 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.632210 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:52 crc kubenswrapper[4919]: E0930 20:14:52.632318 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:52 crc kubenswrapper[4919]: E0930 20:14:52.632447 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.632561 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:52 crc kubenswrapper[4919]: E0930 20:14:52.632704 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.725271 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.725319 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.725338 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.725361 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.725379 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.828549 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.828627 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.828652 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.828683 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.828704 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.931657 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.931730 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.931757 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.931787 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:52 crc kubenswrapper[4919]: I0930 20:14:52.931812 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:52Z","lastTransitionTime":"2025-09-30T20:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.045151 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.045261 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.045281 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.045299 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.045312 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.073157 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.073196 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.073207 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.073239 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.073252 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:53 crc kubenswrapper[4919]: E0930 20:14:53.091167 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:53Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.096960 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.097009 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.097023 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.097043 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.097055 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:53 crc kubenswrapper[4919]: E0930 20:14:53.117150 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:53Z is after 
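Every patch attempt above dies in the same place: the serving certificate of the node.network-node-identity webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, more than a month before these entries. A minimal sketch for confirming that validity window from the node follows; it is illustrative and not part of the captured log, and it assumes Python 3 with the third-party cryptography package (>= 42 for the *_utc properties) and that the endpoint is reachable locally.

```python
# Sketch: fetch the webhook's serving certificate and print its validity
# window. Host/port are taken from the Post URL in the log; everything else
# is an assumption for illustration.
import socket
import ssl
from datetime import datetime, timezone

from cryptography import x509  # third-party; assumed available

HOST, PORT = "127.0.0.1", 9743  # node.network-node-identity webhook endpoint

ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE  # we only want to read the cert, not trust it

with socket.create_connection((HOST, PORT), timeout=5) as raw:
    with ctx.wrap_socket(raw, server_hostname=HOST) as tls:
        der = tls.getpeercert(binary_form=True)  # DER bytes of the leaf cert

cert = x509.load_der_x509_certificate(der)
now = datetime.now(timezone.utc)
print("notBefore:", cert.not_valid_before_utc)
print("notAfter: ", cert.not_valid_after_utc)
print("expired:  ", now > cert.not_valid_after_utc)
```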
2025-08-24T17:21:41Z" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.124172 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.124203 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.124226 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.124240 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.124250 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:53 crc kubenswrapper[4919]: E0930 20:14:53.143550 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:53Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.148405 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.148441 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.148455 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.148472 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.148482 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:53 crc kubenswrapper[4919]: E0930 20:14:53.167121 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:53Z is after 
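The retries are bounded: attempts at 20:14:53.091167, .117150, .143550, .167121, and a fifth (.187437, truncated) below, which matches the upstream kubelet constant nodeStatusUpdateRetry = 5; that this build uses the same value is an assumption. A schematic of the pattern, with hypothetical names throughout:

```python
# Schematic of the bounded node-status retry visible in this log; this is
# not kubelet code. NODE_STATUS_UPDATE_RETRY mirrors upstream kubelet's
# nodeStatusUpdateRetry = 5 (assumed); patch_node_status is a stand-in.
NODE_STATUS_UPDATE_RETRY = 5

class WebhookRejected(Exception):
    """Stand-in for the admission-webhook TLS failure seen above."""

def patch_node_status(node: str) -> None:
    # In the captured log every attempt fails in the webhook TLS handshake.
    raise WebhookRejected("x509: certificate has expired or is not yet valid")

def update_node_status(node: str) -> None:
    last_err = None
    for _attempt in range(NODE_STATUS_UPDATE_RETRY):
        try:
            patch_node_status(node)
            return  # success: status patched, no retry needed
        except WebhookRejected as err:
            last_err = err  # logged as "Error updating node status, will retry"
    # After the final attempt the kubelet gives up until the next sync period.
    raise RuntimeError(f"unable to update node status for {node}: {last_err}")
```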
2025-08-24T17:21:41Z" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.171671 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.171708 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.171719 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.171735 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.171747 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:53 crc kubenswrapper[4919]: E0930 20:14:53.187437 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:53Z is after 
2025-08-24T17:21:41Z" Sep 30 20:14:53 crc kubenswrapper[4919]: E0930 20:14:53.187667 4919 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.189427 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.189497 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.189522 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.189555 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.189574 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.293148 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.293243 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.293263 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.293291 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.293320 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.396426 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.396508 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.396534 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.396566 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.396590 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.499855 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.499913 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.499932 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.499958 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.499979 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.602584 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.602647 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.602663 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.602688 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.602705 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.632359 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:53 crc kubenswrapper[4919]: E0930 20:14:53.632602 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.706572 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.706651 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.706698 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.706725 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.706743 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.810113 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.810175 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.810196 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.810297 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.810334 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.913278 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.913541 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.913634 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.913718 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:53 crc kubenswrapper[4919]: I0930 20:14:53.913794 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:53Z","lastTransitionTime":"2025-09-30T20:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.017346 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.017737 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.017893 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.018051 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.018201 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.121700 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.121765 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.121783 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.121809 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.121826 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.224541 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.224588 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.224604 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.224626 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.224644 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.327606 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.327972 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.328117 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.328336 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.328491 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.431631 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.431675 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.431687 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.431707 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.431719 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.535308 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.535398 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.535421 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.535457 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.535475 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.631615 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.631757 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.631655 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:54 crc kubenswrapper[4919]: E0930 20:14:54.631848 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:14:54 crc kubenswrapper[4919]: E0930 20:14:54.631941 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:14:54 crc kubenswrapper[4919]: E0930 20:14:54.632050 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.637682 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.637745 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.637763 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.637787 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.637806 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.740970 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.741040 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.741060 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.741086 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.741108 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.844035 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.844113 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.844134 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.844164 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.844187 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.947846 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.947918 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.947942 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.947969 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:54 crc kubenswrapper[4919]: I0930 20:14:54.948016 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:54Z","lastTransitionTime":"2025-09-30T20:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.051566 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.051626 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.051645 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.051677 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.051700 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.154110 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.154187 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.154204 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.154256 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.154274 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.259883 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.259940 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.259958 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.259983 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.260025 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.362371 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.362433 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.362459 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.362492 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.362518 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.465199 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.465276 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.465292 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.465314 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.465331 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.568190 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.568278 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.568297 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.568325 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.568344 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.631473 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:55 crc kubenswrapper[4919]: E0930 20:14:55.631635 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.652649 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.669665 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.670638 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.670698 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.670724 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.670755 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.670777 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.689102 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z"
Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.708104 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.731908 4919 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"2025-09-30T20:13:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755\\\\n2025-09-30T20:13:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755 to /host/opt/cni/bin/\\\\n2025-09-30T20:14:00Z [verbose] multus-daemon started\\\\n2025-09-30T20:14:00Z [verbose] Readiness Indicator file check\\\\n2025-09-30T20:14:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.764668 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:48Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 20:14:48.477059 6920 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.773612 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.773658 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.773669 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.773689 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.773702 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.783006 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake 
timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.799314 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.816541 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.832741 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.846291 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.861264 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.876412 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.876455 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.876472 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.876494 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.876510 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.878989 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.890000 4919 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.901660 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.912181 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ca7cbd8-53cd-42e1-84e7-5574fe80d26c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2e3a2e1c1876f84e897ccb3b0cca07a8802fb2afc2de8768c8033ec904b900a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.939401 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce209
6dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.963945 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.979500 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.979548 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.979564 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.979586 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.979603 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:55Z","lastTransitionTime":"2025-09-30T20:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:55 crc kubenswrapper[4919]: I0930 20:14:55.980706 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:14:55Z is after 2025-08-24T17:21:41Z" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.082489 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.082814 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.082973 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.083189 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.083375 4919 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.187161 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.187208 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.187260 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.187284 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.187300 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.290539 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.290603 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.290647 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.290673 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.290686 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.393395 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.393459 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.393476 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.393502 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.393519 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.496414 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.496487 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.496509 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.496540 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.496566 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.599827 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.599907 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.599933 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.599968 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.599992 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.632149 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.632306 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.632157 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:14:56 crc kubenswrapper[4919]: E0930 20:14:56.632460 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:14:56 crc kubenswrapper[4919]: E0930 20:14:56.632658 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:14:56 crc kubenswrapper[4919]: E0930 20:14:56.632705 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.703247 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.703306 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.703327 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.703358 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.703381 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.805576 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.805614 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.805625 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.805645 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.805660 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.908905 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.908976 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.908998 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.909027 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:56 crc kubenswrapper[4919]: I0930 20:14:56.909046 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:56Z","lastTransitionTime":"2025-09-30T20:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.011957 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.012016 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.012037 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.012063 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.012082 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.114488 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.114590 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.114613 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.114644 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.114667 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.216656 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.216714 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.216730 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.216754 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.216772 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.319928 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.319982 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.319998 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.320019 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.320037 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.423502 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.423571 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.423594 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.423631 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.423655 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.526474 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.526546 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.526557 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.526598 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.526612 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.629938 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.629997 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.630016 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.630040 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.630058 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.631313 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:14:57 crc kubenswrapper[4919]: E0930 20:14:57.631518 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.733997 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.734086 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.734107 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.734136 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.734156 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.837366 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.837450 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.837469 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.837494 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.837512 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.940921 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.940970 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.940986 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.941009 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:57 crc kubenswrapper[4919]: I0930 20:14:57.941026 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:57Z","lastTransitionTime":"2025-09-30T20:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.043917 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.043976 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.043998 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.044027 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.044049 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:58Z","lastTransitionTime":"2025-09-30T20:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.146781 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.146853 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.146873 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.146899 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.146917 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:58Z","lastTransitionTime":"2025-09-30T20:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.249612 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.249657 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.249671 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.249689 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.249713 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:14:58Z","lastTransitionTime":"2025-09-30T20:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.631894 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.631995 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:58 crc kubenswrapper[4919]: I0930 20:14:58.632096 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:14:58 crc kubenswrapper[4919]: E0930 20:14:58.632085 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:14:58 crc kubenswrapper[4919]: E0930 20:14:58.632348 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:14:58 crc kubenswrapper[4919]: E0930 20:14:58.632704 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:14:59 crc kubenswrapper[4919]: I0930 20:14:59.102081 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.102299 4919 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.102815 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.102781463 +0000 UTC m=+148.218814630 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 30 20:14:59 crc kubenswrapper[4919]: I0930 20:14:59.203109 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.203240 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.203192877 +0000 UTC m=+148.319226004 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:14:59 crc kubenswrapper[4919]: I0930 20:14:59.203699 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.203886 4919 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 20:14:59 crc kubenswrapper[4919]: I0930 20:14:59.204045 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.204124 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.204099563 +0000 UTC m=+148.320132690 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.204161 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.204265 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.204278 4919 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.204340 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.20432931 +0000 UTC m=+148.320362437 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 20:14:59 crc kubenswrapper[4919]: I0930 20:14:59.204814 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.204918 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.205195 4919 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.205353 4919 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.205485 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.205471333 +0000 UTC m=+148.321504480 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 30 20:14:59 crc kubenswrapper[4919]: I0930 20:14:59.631595 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:14:59 crc kubenswrapper[4919]: E0930 20:14:59.631805 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:00 crc kubenswrapper[4919]: I0930 20:15:00.631941 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:00 crc kubenswrapper[4919]: I0930 20:15:00.632031 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:00 crc kubenswrapper[4919]: I0930 20:15:00.631941 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:00 crc kubenswrapper[4919]: E0930 20:15:00.632167 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:15:00 crc kubenswrapper[4919]: E0930 20:15:00.632368 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:15:00 crc kubenswrapper[4919]: E0930 20:15:00.632560 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:15:01 crc kubenswrapper[4919]: I0930 20:15:01.632191 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:01 crc kubenswrapper[4919]: E0930 20:15:01.632446 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Has your network provider started?"} Sep 30 20:15:02 crc kubenswrapper[4919]: I0930 20:15:02.632012 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:02 crc kubenswrapper[4919]: I0930 20:15:02.632078 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:02 crc kubenswrapper[4919]: I0930 20:15:02.632052 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:02 crc kubenswrapper[4919]: E0930 20:15:02.632333 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:02 crc kubenswrapper[4919]: E0930 20:15:02.632549 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:02 crc kubenswrapper[4919]: E0930 20:15:02.632681 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:02 crc kubenswrapper[4919]: I0930 20:15:02.683088 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:02 crc kubenswrapper[4919]: I0930 20:15:02.683140 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:02 crc kubenswrapper[4919]: I0930 20:15:02.683158 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:02 crc kubenswrapper[4919]: I0930 20:15:02.683184 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:02 crc kubenswrapper[4919]: I0930 20:15:02.683201 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:02Z","lastTransitionTime":"2025-09-30T20:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
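[editor's note] The entries above all reduce to one condition: nothing has written a CNI config into /etc/kubernetes/cni/net.d/ yet, so the node stays NotReady and no pod sandboxes can be created. A minimal sketch of how one might confirm that state (assuming shell/Python access to the node's filesystem; the directory path is taken verbatim from the log, everything else here is illustrative):

import os

# Path reported in the kubelet's NetworkPluginNotReady message above.
CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"

try:
    entries = sorted(os.listdir(CNI_CONF_DIR))
except FileNotFoundError:
    entries = []

# An empty or missing directory is exactly what the kubelet is reporting:
# the network plugin has not installed its configuration yet.
print(f"{CNI_CONF_DIR}: {entries if entries else 'no CNI configuration files'}")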
Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.232123 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.232211 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.232273 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.232308 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.232334 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: E0930 20:15:03.258035 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.263621 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.263672 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.263692 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.263718 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.263735 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: E0930 20:15:03.283344 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.341008 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.341080 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.341099 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.341127 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.341146 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: E0930 20:15:03.361493 4919 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-30T20:15:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"cc7fbf32-6c9b-4b90-bd86-52c553e5254d\\\",\\\"systemUUID\\\":\\\"624bf2d2-e12c-4fba-8731-56406029f22b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:03Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:03 crc kubenswrapper[4919]: E0930 20:15:03.362064 4919 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.364813 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.364893 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.364913 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.364941 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.364960 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.468520 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.468620 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.468644 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.468669 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.468687 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.572293 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.572393 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.572412 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.572471 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.572488 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.631778 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:03 crc kubenswrapper[4919]: E0930 20:15:03.633525 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.639693 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce" Sep 30 20:15:03 crc kubenswrapper[4919]: E0930 20:15:03.640050 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.675654 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.675961 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.676075 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.676202 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.676390 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.778443 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.778494 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.778510 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.778535 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.778551 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.880927 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.881016 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.881032 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.881073 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.881088 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.984378 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.985487 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.985671 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.985837 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:03 crc kubenswrapper[4919]: I0930 20:15:03.986014 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:03Z","lastTransitionTime":"2025-09-30T20:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.088634 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.088702 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.088726 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.088755 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.088775 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.191806 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.191841 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.191852 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.191873 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.191885 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.294810 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.294866 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.294878 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.294895 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.294909 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.397684 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.398034 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.398256 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.398422 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.398628 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.501152 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.501189 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.501200 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.501242 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.501255 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.603324 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.603555 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.603678 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.603836 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.603913 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.632202 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.632510 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:04 crc kubenswrapper[4919]: E0930 20:15:04.632548 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:04 crc kubenswrapper[4919]: E0930 20:15:04.632758 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.632231 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:04 crc kubenswrapper[4919]: E0930 20:15:04.632944 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.706526 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.706590 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.706608 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.706651 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.706670 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.808939 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.809001 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.809025 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.809053 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.809073 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.912282 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.912336 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.912358 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.912384 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:04 crc kubenswrapper[4919]: I0930 20:15:04.912403 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:04Z","lastTransitionTime":"2025-09-30T20:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.015848 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.015907 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.015924 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.015949 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.015967 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.129778 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.130116 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.130358 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.130562 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.130769 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.233670 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.233727 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.233747 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.233771 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.233789 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.337197 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.337274 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.337291 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.337314 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.337336 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.440327 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.440392 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.440420 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.440453 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.440476 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.543960 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.544030 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.544046 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.544067 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.544080 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.631495 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:05 crc kubenswrapper[4919]: E0930 20:15:05.631739 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.647286 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.647329 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.647338 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.647358 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.647369 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.652950 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9ba798aa-64e5-4117-a63f-50d690a52b19\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ba495222c367ae1d9d1fd2c5af21979a542f1a573a327d96883cd8a0c1256004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8272f68c11f203bdc5d6db18b3cba3952387f66817e22fa4f14d522c6fa1a533\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a
8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92058d3095888d246f745bf5c32528b9df402c25765cb63e7df1a054478f3cdc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4dc129e21b83425352238c4d864384b17a085970b290f31390a89f3d3e4198f3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.675865 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a1ab1b2e-3bf7-4956-9042-66429245b189\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71e0d2e9ce119fe3ba8ff5bce5e0b188447597d806d6b84e5efd39771deb825d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://059f78e118688956bf278ec5d77973bde758aa6e38966f9edce81742dd19f7de\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a945c27c12a8879ddb985915beee31b2cb48807a925f4432a04ac53fb31cefcc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ed7002a80717ef33664bc7d282948a812365e4ad6a82fdeed31cfd1e91d5ce1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c538d66329ea0595d17b9c3149e6ebab48fe499dbc620fa29391714286382e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94108f502cdbcb1eab8d2e791a3e1e21c67c1cb224a44e691ce443ea5179f115\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://af875e913d17c943a35f4ae9fb002c01633d4a532e95bcdb684feb358d84bd30\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dldw7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-s6g9s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.694093 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4930c0a7-d9e1-447d-945c-7d44124b6340\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://04287447e30cbd547b865b6dec80a4eaaa5a7be96978210a89c6ad4170d00f84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://609bf21073c9a85a6fbad25ba8b7495051247416465f49d2adb8af368acbecb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rxrx5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-tvtj4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 
20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.711144 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c0624d31-70fc-4d66-a31b-4e67896ab40e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:11Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pp9g2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:11Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-bwpdf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.725363 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2ca7cbd8-53cd-42e1-84e7-5574fe80d26c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2e3a2e1c1876f84e897ccb3b0cca07a8802fb2afc2de8768c8033ec904b900a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ce7938ca29df72ec7c8835e35a9d40bb2b3e15a4230cacf1904e05d105109a33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.750478 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.750520 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.750532 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.750566 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.750580 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.759397 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4edb68f2-3acb-47cc-b573-a245c15443d8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de97634732143769e9b083c88f6a17ad71d7a108622c3998c74ec06879a265d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c12b5f0a6d64dc37df622aba029c3d6d31983ed9c3ff7c97ef796389272c89e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c60ef9298310eaaafdfec43e189366aa57c02b74df623f5c890e3d650a10a52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20a9ff4356b5231bc5aa091a099de3e6c3ce2096dedf3349aaeaf4a5e5060279\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fe556661f64f4371bd794ce60e3e5d29db87129c3a49bd386ebee51f080a9e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4e83d15bc3d86fb70b68dbab652f9c06ff8689ca4c4c756662d57f2c52dffa0c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3427a7a55998d829e105625bd7919c13fc39995a1740af126f66d0a438d738ed\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b55ce542cb5650638017292eab7a73225501d72fad0049436af515294b1d2bb1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.778819 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.794488 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-5xx2l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"02a9256e-b65e-4ed6-877b-27c4fa0d3339\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb199ea530554f87a066de72702072fdc5d009319b032a25b64b8ab56462b49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-62wnk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-5xx2l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.811434 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1ae243b9-c348-4ad5-9d4e-7be96f9d4c11\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39adb3ef41a3bee7f731d54aadd978bff54b2dc04a1dd856459606d407bcf89e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4440163d884f5c19612f2e39097a492193772c981a35796c16a87be2a366ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d0c1a7feda4d4464d265c918de81f4ac71907e6ca0f34a25d39486b22e757c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://676504c83c39cb68789249ba1f3e925934f0a4f4f1f277418ef9533da5269eec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.828094 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.853746 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.853791 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.853802 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.853820 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.853833 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.858865 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"af48d482-2587-4521-ba91-56d35b0e487d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:48Z\\\",\\\"message\\\":\\\"w:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:update Table:NAT Row:map[external_ip:192.168.126.11 logical_ip:10.217.0.92 options:{GoMap:map[stateless:false]} type:snat] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {73135118-cf1b-4568-bd31-2f50308bf69d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Router Row:map[] Rows:[] Columns:[] Mutations:[{Column:nat Mutator:insert Value:{GoSet:[{GoUUID:73135118-cf1b-4568-bd31-2f50308bf69d}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {e3c4661a-36a6-47f0-a6c0-a4ee741f2224}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0930 20:14:48.477059 6920 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:14:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9jx9g\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:58Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-4p25c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.881375 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bb29cc5a-4847-4c5e-a22e-443399123894\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83b6d6ba5bcb77a114c556d5bee512128ec1343dc1ccf6e7e777326a5d0775e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1324a014e8c69a06bf54c908319a28775f803ed29724c8869023f65d93b7db7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://325a9a50819fe2bf17e842413573316386302c3a15cb2580bcef200743af158c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae7186689f26f8747fd3100d91292c3d183a5a7740bfa38888aeac1afbb9b177\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2f40272cbc5615ac7164d87071f4afcdd07712ad3bc29d7014c467df51b58e4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-30T20:13:49Z\\\",\\\"message\\\":\\\"W0930 20:13:38.835037 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0930 20:13:38.835662 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759263218 cert, and key in /tmp/serving-cert-1977770590/serving-signer.crt, /tmp/serving-cert-1977770590/serving-signer.key\\\\nI0930 20:13:39.097661 1 observer_polling.go:159] Starting file observer\\\\nW0930 20:13:39.100116 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0930 20:13:39.100331 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0930 20:13:39.102711 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1977770590/tls.crt::/tmp/serving-cert-1977770590/tls.key\\\\\\\"\\\\nF0930 20:13:49.412143 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48272c99aa9233a177b536376b283fc4005c503e86b2d2703672f5aadb9dd84d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:38Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminat
ed\\\":{\\\"containerID\\\":\\\"cri-o://08afe89c198c8d98beec48ee5f1af009435e8444ac1b3a94b0c408a279cda774\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-30T20:13:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.900382 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca0af5d89069ce0ca909e0d50d848ca3c10ae3c33511e1b9a335f173d7ee27e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.914111 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d6d16f928c3cf8ddefa6cd06453d40ffb9f222610f425086cb4fb427326c5d0e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac90df23dd731d29bcbae2bb496dc047dbc40196beb98d712f3f54ac09072397\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.931207 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.949743 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c739a9caadf8485942f27605fb5ccb9c19133871e05b8d9622008e3932dff0d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.955951 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.955977 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.956010 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.956025 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.956035 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:05Z","lastTransitionTime":"2025-09-30T20:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.966550 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb371a63-6d82-453e-930e-656710b97f10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d3df7ca58d79c0bb74abcd19cc17166cd91a89df65226119a627cb7efa6caef2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mp5r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-p4zv6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:05 crc kubenswrapper[4919]: I0930 20:15:05.985620 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-c5crr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e33a72-0a49-4944-a2c2-ac16183942cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:13:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-30T20:14:45Z\\\",\\\"message\\\":\\\"2025-09-30T20:13:59+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755\\\\n2025-09-30T20:13:59+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_ab0b1b54-2e71-4810-87cb-6d01ed191755 to /host/opt/cni/bin/\\\\n2025-09-30T20:14:00Z [verbose] multus-daemon started\\\\n2025-09-30T20:14:00Z [verbose] Readiness Indicator file check\\\\n2025-09-30T20:14:45Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-30T20:13:58Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bh9c9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:13:57Z\\\"}}\" for pod \"openshift-multus\"/\"multus-c5crr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.002319 4919 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-cdffv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bef69394-3e21-4893-a952-1a0e1817e00f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-30T20:14:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://51b94f535595f4d0a37a1b7b1ab9bfa7d3931ee81f38b875291f1a1ca23379d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-30T20:14:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-td7xg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-30T20:14:00Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-cdffv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-30T20:15:05Z is after 2025-08-24T17:21:41Z" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.059345 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.059414 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.059431 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.059456 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.059474 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.162605 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.162901 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.162911 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.162926 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.162935 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.266617 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.266705 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.266724 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.266748 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.266765 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.369871 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.369920 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.369937 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.369959 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.369977 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.473159 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.473205 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.473242 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.473262 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.473276 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.576492 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.576572 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.576595 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.576627 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.576648 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.631587 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.631672 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:06 crc kubenswrapper[4919]: E0930 20:15:06.631833 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.631867 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:06 crc kubenswrapper[4919]: E0930 20:15:06.632131 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:06 crc kubenswrapper[4919]: E0930 20:15:06.632024 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.679174 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.679229 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.679238 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.679253 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.679263 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.782430 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.782508 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.782529 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.782561 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.782586 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.885999 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.886051 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.886064 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.886086 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.886098 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.989554 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.989619 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.989637 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.989663 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:06 crc kubenswrapper[4919]: I0930 20:15:06.989684 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:06Z","lastTransitionTime":"2025-09-30T20:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.092197 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.092282 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.092298 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.092321 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.092337 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.195156 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.195257 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.195282 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.195310 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.195328 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.298856 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.298917 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.298940 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.298972 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.298992 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.402369 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.402445 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.402468 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.402498 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.402520 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.505729 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.505797 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.505816 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.505840 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.505858 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.608513 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.608640 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.608660 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.608685 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.608702 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.632669 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:07 crc kubenswrapper[4919]: E0930 20:15:07.632866 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.710872 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.710910 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.710920 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.710937 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.710950 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.814363 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.814478 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.814500 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.814523 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.814541 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.917672 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.917774 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.917797 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.917826 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:07 crc kubenswrapper[4919]: I0930 20:15:07.917846 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:07Z","lastTransitionTime":"2025-09-30T20:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.020302 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.020371 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.020393 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.020419 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.020438 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.123714 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.123810 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.123833 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.123862 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.123918 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.227363 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.227435 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.227455 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.227481 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.227498 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.331260 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.331356 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.331387 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.331420 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.331444 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.435169 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.435254 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.435272 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.435296 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.435313 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.540271 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.540334 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.540356 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.540384 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.540405 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.632122 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.632160 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:08 crc kubenswrapper[4919]: E0930 20:15:08.632261 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.632138 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:08 crc kubenswrapper[4919]: E0930 20:15:08.632380 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:08 crc kubenswrapper[4919]: E0930 20:15:08.632674 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.643034 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.643118 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.643143 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.643178 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.643203 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.746253 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.746319 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.746336 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.746364 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.746383 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.849464 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.849532 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.849550 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.849578 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.849596 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.952975 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.953029 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.953046 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.953070 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:08 crc kubenswrapper[4919]: I0930 20:15:08.953088 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:08Z","lastTransitionTime":"2025-09-30T20:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.056368 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.056444 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.056468 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.056502 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.056524 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.160006 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.160065 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.160081 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.160107 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.160125 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.262611 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.262683 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.262707 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.262738 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.262759 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.365041 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.365092 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.365110 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.365133 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.365150 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.470192 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.470284 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.470301 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.470326 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.470347 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.573446 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.573523 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.573541 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.573568 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.573588 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.631243 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:09 crc kubenswrapper[4919]: E0930 20:15:09.631467 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.676764 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.676826 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.676845 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.676895 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.676914 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.779493 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.779558 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.779577 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.779598 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.779613 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.882516 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.882558 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.882570 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.882588 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.882599 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.985947 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.985984 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.985993 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.986010 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:09 crc kubenswrapper[4919]: I0930 20:15:09.986036 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:09Z","lastTransitionTime":"2025-09-30T20:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.089107 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.089151 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.089166 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.089192 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.089207 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.192495 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.192552 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.192569 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.192593 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.192611 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.296169 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.296282 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.296307 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.296333 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.296350 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.398925 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.398966 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.398976 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.398990 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.399001 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.501765 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.501842 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.501863 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.501890 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.501915 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.605018 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.605059 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.605071 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.605095 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.605107 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.631801 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.632028 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:10 crc kubenswrapper[4919]: E0930 20:15:10.632056 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.632083 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:10 crc kubenswrapper[4919]: E0930 20:15:10.632207 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:10 crc kubenswrapper[4919]: E0930 20:15:10.632336 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.708438 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.708513 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.708536 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.708566 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.708586 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.811866 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.811918 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.811929 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.811953 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.811967 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.914609 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.914644 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.914656 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.914674 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:10 crc kubenswrapper[4919]: I0930 20:15:10.914685 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:10Z","lastTransitionTime":"2025-09-30T20:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.017467 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.017527 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.017549 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.017575 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.017592 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.119514 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.119562 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.119574 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.119592 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.119606 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.221489 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.221521 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.221529 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.221542 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.221551 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.324936 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.324997 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.325014 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.325041 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.325065 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.427527 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.427572 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.427585 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.427606 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.427620 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.530795 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.531064 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.531153 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.531274 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.531366 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.631832 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:11 crc kubenswrapper[4919]: E0930 20:15:11.632142 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.634626 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.634709 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.634731 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.634759 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.634782 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.737597 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.737646 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.737657 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.737675 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.737687 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.840794 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.840863 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.840890 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.840922 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.840946 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.943901 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.943972 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.943990 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.944017 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:11 crc kubenswrapper[4919]: I0930 20:15:11.944036 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:11Z","lastTransitionTime":"2025-09-30T20:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.046901 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.046972 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.046994 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.047022 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.047045 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.149747 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.149796 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.149819 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.149841 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.149854 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.251980 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.252027 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.252040 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.252058 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.252069 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.355041 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.355097 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.355108 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.355122 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.355132 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.458162 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.458208 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.458230 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.458246 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.458256 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.560869 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.560939 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.560961 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.560994 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.561019 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.631979 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.632022 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.632084 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:12 crc kubenswrapper[4919]: E0930 20:15:12.632203 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:12 crc kubenswrapper[4919]: E0930 20:15:12.632478 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:12 crc kubenswrapper[4919]: E0930 20:15:12.632554 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.663853 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.663911 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.663930 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.663950 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.663964 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.765975 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.766038 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.766055 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.766080 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.766097 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.869295 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.869348 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.869363 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.869383 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.869397 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.972523 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.972575 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.972587 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.972610 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:12 crc kubenswrapper[4919]: I0930 20:15:12.972623 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:12Z","lastTransitionTime":"2025-09-30T20:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.079367 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.079441 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.079461 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.079485 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.079504 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:13Z","lastTransitionTime":"2025-09-30T20:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.181297 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.181344 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.181356 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.181374 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.181387 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:13Z","lastTransitionTime":"2025-09-30T20:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.283846 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.283895 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.283907 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.283926 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.283939 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:13Z","lastTransitionTime":"2025-09-30T20:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.386914 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.386961 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.386972 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.386990 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.387000 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:13Z","lastTransitionTime":"2025-09-30T20:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.490982 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.491092 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.491117 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.491142 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.491161 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:13Z","lastTransitionTime":"2025-09-30T20:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.518482 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.518560 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.518582 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.518609 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.518627 4919 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-30T20:15:13Z","lastTransitionTime":"2025-09-30T20:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.569822 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p"] Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.570436 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.573473 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.573759 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.573783 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.574053 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.623626 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podStartSLOduration=77.623610534 podStartE2EDuration="1m17.623610534s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.622607335 +0000 UTC m=+98.738640502" watchObservedRunningTime="2025-09-30 20:15:13.623610534 +0000 UTC m=+98.739643661" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.631930 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:13 crc kubenswrapper[4919]: E0930 20:15:13.632102 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.642682 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-c5crr" podStartSLOduration=76.642665826 podStartE2EDuration="1m16.642665826s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.641543513 +0000 UTC m=+98.757576640" watchObservedRunningTime="2025-09-30 20:15:13.642665826 +0000 UTC m=+98.758698953" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.682042 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.682135 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.682171 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.682206 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.682325 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-service-ca\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.692419 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=79.692395624 podStartE2EDuration="1m19.692395624s" podCreationTimestamp="2025-09-30 20:13:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.69190549 +0000 UTC m=+98.807938637" watchObservedRunningTime="2025-09-30 20:15:13.692395624 +0000 UTC m=+98.808428781" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.751945 4919 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-cdffv" podStartSLOduration=77.751924846 podStartE2EDuration="1m17.751924846s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.751347239 +0000 UTC m=+98.867380446" watchObservedRunningTime="2025-09-30 20:15:13.751924846 +0000 UTC m=+98.867957983" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.783123 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.783192 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.783204 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.783273 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.783302 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.783335 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-service-ca\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.783467 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc 
kubenswrapper[4919]: I0930 20:15:13.784351 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-service-ca\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.793284 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.803618 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fcc6111-1ca5-4fb0-9139-4bb3af258b6e-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-s4g9p\" (UID: \"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.807022 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=75.807001029 podStartE2EDuration="1m15.807001029s" podCreationTimestamp="2025-09-30 20:13:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.806054031 +0000 UTC m=+98.922087178" watchObservedRunningTime="2025-09-30 20:15:13.807001029 +0000 UTC m=+98.923034186" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.829937 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-s6g9s" podStartSLOduration=76.829909141 podStartE2EDuration="1m16.829909141s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.829006445 +0000 UTC m=+98.945039592" watchObservedRunningTime="2025-09-30 20:15:13.829909141 +0000 UTC m=+98.945942308" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.843966 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-tvtj4" podStartSLOduration=76.843948977 podStartE2EDuration="1m16.843948977s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.843422372 +0000 UTC m=+98.959455539" watchObservedRunningTime="2025-09-30 20:15:13.843948977 +0000 UTC m=+98.959982114" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.857814 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-5xx2l" podStartSLOduration=77.857788688 podStartE2EDuration="1m17.857788688s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.857588552 +0000 UTC m=+98.973621689" watchObservedRunningTime="2025-09-30 20:15:13.857788688 +0000 
UTC m=+98.973821825" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.890728 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.909347 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=25.909332468 podStartE2EDuration="25.909332468s" podCreationTimestamp="2025-09-30 20:14:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.86719417 +0000 UTC m=+98.983227307" watchObservedRunningTime="2025-09-30 20:15:13.909332468 +0000 UTC m=+99.025365605" Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.910058 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=74.910051258 podStartE2EDuration="1m14.910051258s" podCreationTimestamp="2025-09-30 20:13:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.908658028 +0000 UTC m=+99.024691165" watchObservedRunningTime="2025-09-30 20:15:13.910051258 +0000 UTC m=+99.026084395" Sep 30 20:15:13 crc kubenswrapper[4919]: W0930 20:15:13.910367 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fcc6111_1ca5_4fb0_9139_4bb3af258b6e.slice/crio-2f15be39d5f7080868d186c3051f9b2ea4245ea9340730a00ef8b3101e9b026c WatchSource:0}: Error finding container 2f15be39d5f7080868d186c3051f9b2ea4245ea9340730a00ef8b3101e9b026c: Status 404 returned error can't find the container with id 2f15be39d5f7080868d186c3051f9b2ea4245ea9340730a00ef8b3101e9b026c Sep 30 20:15:13 crc kubenswrapper[4919]: I0930 20:15:13.943052 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=51.943036482 podStartE2EDuration="51.943036482s" podCreationTimestamp="2025-09-30 20:14:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:13.939477269 +0000 UTC m=+99.055510406" watchObservedRunningTime="2025-09-30 20:15:13.943036482 +0000 UTC m=+99.059069609" Sep 30 20:15:14 crc kubenswrapper[4919]: I0930 20:15:14.254334 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" event={"ID":"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e","Type":"ContainerStarted","Data":"717820fdd5d75e7a16d76c64fc17affa1abd6984e04e4ba69abd0477b31fff63"} Sep 30 20:15:14 crc kubenswrapper[4919]: I0930 20:15:14.254392 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" event={"ID":"9fcc6111-1ca5-4fb0-9139-4bb3af258b6e","Type":"ContainerStarted","Data":"2f15be39d5f7080868d186c3051f9b2ea4245ea9340730a00ef8b3101e9b026c"} Sep 30 20:15:14 crc kubenswrapper[4919]: I0930 20:15:14.267632 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s4g9p" podStartSLOduration=78.26761386 podStartE2EDuration="1m18.26761386s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:14.267186488 +0000 UTC m=+99.383219635" watchObservedRunningTime="2025-09-30 20:15:14.26761386 +0000 UTC m=+99.383646987" Sep 30 20:15:14 crc kubenswrapper[4919]: I0930 20:15:14.631882 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:14 crc kubenswrapper[4919]: E0930 20:15:14.632731 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:14 crc kubenswrapper[4919]: I0930 20:15:14.632013 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:14 crc kubenswrapper[4919]: E0930 20:15:14.632986 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:14 crc kubenswrapper[4919]: I0930 20:15:14.631964 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:14 crc kubenswrapper[4919]: E0930 20:15:14.633234 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:15 crc kubenswrapper[4919]: I0930 20:15:15.632168 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:15 crc kubenswrapper[4919]: E0930 20:15:15.633175 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:15 crc kubenswrapper[4919]: I0930 20:15:15.806858 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:15 crc kubenswrapper[4919]: E0930 20:15:15.807008 4919 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:15:15 crc kubenswrapper[4919]: E0930 20:15:15.807063 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs podName:c0624d31-70fc-4d66-a31b-4e67896ab40e nodeName:}" failed. No retries permitted until 2025-09-30 20:16:19.807046537 +0000 UTC m=+164.923079664 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs") pod "network-metrics-daemon-bwpdf" (UID: "c0624d31-70fc-4d66-a31b-4e67896ab40e") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 30 20:15:16 crc kubenswrapper[4919]: I0930 20:15:16.631995 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:16 crc kubenswrapper[4919]: E0930 20:15:16.632441 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:16 crc kubenswrapper[4919]: I0930 20:15:16.632107 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:16 crc kubenswrapper[4919]: E0930 20:15:16.632511 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:16 crc kubenswrapper[4919]: I0930 20:15:16.632003 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:16 crc kubenswrapper[4919]: E0930 20:15:16.632730 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:17 crc kubenswrapper[4919]: I0930 20:15:17.631712 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:17 crc kubenswrapper[4919]: E0930 20:15:17.631915 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:18 crc kubenswrapper[4919]: I0930 20:15:18.632407 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:18 crc kubenswrapper[4919]: I0930 20:15:18.632437 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:18 crc kubenswrapper[4919]: I0930 20:15:18.632898 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce" Sep 30 20:15:18 crc kubenswrapper[4919]: E0930 20:15:18.633123 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-4p25c_openshift-ovn-kubernetes(af48d482-2587-4521-ba91-56d35b0e487d)\"" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" Sep 30 20:15:18 crc kubenswrapper[4919]: E0930 20:15:18.633147 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:18 crc kubenswrapper[4919]: E0930 20:15:18.633596 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:18 crc kubenswrapper[4919]: I0930 20:15:18.633595 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:18 crc kubenswrapper[4919]: E0930 20:15:18.633705 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:19 crc kubenswrapper[4919]: I0930 20:15:19.632271 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:19 crc kubenswrapper[4919]: E0930 20:15:19.632517 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:20 crc kubenswrapper[4919]: I0930 20:15:20.631993 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:20 crc kubenswrapper[4919]: I0930 20:15:20.631995 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:20 crc kubenswrapper[4919]: I0930 20:15:20.632104 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:20 crc kubenswrapper[4919]: E0930 20:15:20.632755 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:20 crc kubenswrapper[4919]: E0930 20:15:20.632875 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:20 crc kubenswrapper[4919]: E0930 20:15:20.633018 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:21 crc kubenswrapper[4919]: I0930 20:15:21.631652 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:21 crc kubenswrapper[4919]: E0930 20:15:21.631862 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:22 crc kubenswrapper[4919]: I0930 20:15:22.632158 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:22 crc kubenswrapper[4919]: E0930 20:15:22.632412 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:22 crc kubenswrapper[4919]: I0930 20:15:22.632446 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:22 crc kubenswrapper[4919]: I0930 20:15:22.632495 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:22 crc kubenswrapper[4919]: E0930 20:15:22.632787 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:22 crc kubenswrapper[4919]: E0930 20:15:22.632975 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:23 crc kubenswrapper[4919]: I0930 20:15:23.631851 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:23 crc kubenswrapper[4919]: E0930 20:15:23.632014 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:24 crc kubenswrapper[4919]: I0930 20:15:24.631446 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:24 crc kubenswrapper[4919]: I0930 20:15:24.631538 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:24 crc kubenswrapper[4919]: I0930 20:15:24.631460 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:24 crc kubenswrapper[4919]: E0930 20:15:24.631640 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:24 crc kubenswrapper[4919]: E0930 20:15:24.631788 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:24 crc kubenswrapper[4919]: E0930 20:15:24.631944 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:25 crc kubenswrapper[4919]: I0930 20:15:25.631417 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:25 crc kubenswrapper[4919]: E0930 20:15:25.633889 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:26 crc kubenswrapper[4919]: I0930 20:15:26.631511 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:26 crc kubenswrapper[4919]: I0930 20:15:26.631539 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:26 crc kubenswrapper[4919]: E0930 20:15:26.631668 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:26 crc kubenswrapper[4919]: I0930 20:15:26.631539 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:26 crc kubenswrapper[4919]: E0930 20:15:26.631823 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:26 crc kubenswrapper[4919]: E0930 20:15:26.631941 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:27 crc kubenswrapper[4919]: I0930 20:15:27.631443 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:27 crc kubenswrapper[4919]: E0930 20:15:27.631878 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:28 crc kubenswrapper[4919]: I0930 20:15:28.631148 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:28 crc kubenswrapper[4919]: I0930 20:15:28.631163 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:28 crc kubenswrapper[4919]: E0930 20:15:28.631434 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:28 crc kubenswrapper[4919]: I0930 20:15:28.631561 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:28 crc kubenswrapper[4919]: E0930 20:15:28.632515 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:28 crc kubenswrapper[4919]: E0930 20:15:28.632739 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:29 crc kubenswrapper[4919]: I0930 20:15:29.632303 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:15:29 crc kubenswrapper[4919]: E0930 20:15:29.632477 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e" Sep 30 20:15:30 crc kubenswrapper[4919]: I0930 20:15:30.631835 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:15:30 crc kubenswrapper[4919]: I0930 20:15:30.631980 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:15:30 crc kubenswrapper[4919]: I0930 20:15:30.632007 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:15:30 crc kubenswrapper[4919]: E0930 20:15:30.632062 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 30 20:15:30 crc kubenswrapper[4919]: E0930 20:15:30.632176 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 30 20:15:30 crc kubenswrapper[4919]: E0930 20:15:30.633792 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 30 20:15:30 crc kubenswrapper[4919]: I0930 20:15:30.633981 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce" Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.359524 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/1.log" Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.360963 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/0.log" Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.361029 4919 generic.go:334] "Generic (PLEG): container finished" podID="e3e33a72-0a49-4944-a2c2-ac16183942cf" containerID="aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb" exitCode=1 Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.361136 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c5crr" event={"ID":"e3e33a72-0a49-4944-a2c2-ac16183942cf","Type":"ContainerDied","Data":"aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb"} Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.361193 4919 scope.go:117] "RemoveContainer" containerID="503ad7ec3a9a315979dc01dfd5d35043ca0575dccf604653ab04096aba8ce05a" Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.361830 4919 scope.go:117] "RemoveContainer" containerID="aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb" Sep 30 20:15:31 crc kubenswrapper[4919]: E0930 20:15:31.362083 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-c5crr_openshift-multus(e3e33a72-0a49-4944-a2c2-ac16183942cf)\"" pod="openshift-multus/multus-c5crr" podUID="e3e33a72-0a49-4944-a2c2-ac16183942cf" Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.367536 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/3.log" Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.370054 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerStarted","Data":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"} Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.370535 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.416294 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podStartSLOduration=94.416272457 podStartE2EDuration="1m34.416272457s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:31.414674941 +0000 UTC m=+116.530708078" watchObservedRunningTime="2025-09-30 20:15:31.416272457 +0000 UTC m=+116.532305604" Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.631889 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:31 crc kubenswrapper[4919]: E0930 20:15:31.632411 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:31 crc kubenswrapper[4919]: I0930 20:15:31.749588 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-bwpdf"]
Sep 30 20:15:32 crc kubenswrapper[4919]: I0930 20:15:32.376163 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/1.log"
Sep 30 20:15:32 crc kubenswrapper[4919]: I0930 20:15:32.376341 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:32 crc kubenswrapper[4919]: E0930 20:15:32.376500 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:32 crc kubenswrapper[4919]: I0930 20:15:32.631256 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:32 crc kubenswrapper[4919]: I0930 20:15:32.631351 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:32 crc kubenswrapper[4919]: I0930 20:15:32.631265 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:32 crc kubenswrapper[4919]: E0930 20:15:32.631459 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:15:32 crc kubenswrapper[4919]: E0930 20:15:32.631534 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:15:32 crc kubenswrapper[4919]: E0930 20:15:32.631651 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:15:34 crc kubenswrapper[4919]: I0930 20:15:34.631332 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:34 crc kubenswrapper[4919]: I0930 20:15:34.631398 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:34 crc kubenswrapper[4919]: I0930 20:15:34.631355 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:34 crc kubenswrapper[4919]: E0930 20:15:34.631540 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:15:34 crc kubenswrapper[4919]: I0930 20:15:34.631631 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:34 crc kubenswrapper[4919]: E0930 20:15:34.631678 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:15:34 crc kubenswrapper[4919]: E0930 20:15:34.631878 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:15:34 crc kubenswrapper[4919]: E0930 20:15:34.632042 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:35 crc kubenswrapper[4919]: E0930 20:15:35.616699 4919 kubelet_node_status.go:497] "Node not becoming ready in time after startup"
Sep 30 20:15:35 crc kubenswrapper[4919]: E0930 20:15:35.726116 4919 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 30 20:15:36 crc kubenswrapper[4919]: I0930 20:15:36.632061 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:36 crc kubenswrapper[4919]: E0930 20:15:36.632656 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:15:36 crc kubenswrapper[4919]: I0930 20:15:36.632190 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:36 crc kubenswrapper[4919]: I0930 20:15:36.632258 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:36 crc kubenswrapper[4919]: I0930 20:15:36.632143 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:36 crc kubenswrapper[4919]: E0930 20:15:36.633588 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:15:36 crc kubenswrapper[4919]: E0930 20:15:36.633601 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:15:36 crc kubenswrapper[4919]: E0930 20:15:36.633434 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:38 crc kubenswrapper[4919]: I0930 20:15:38.631560 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:38 crc kubenswrapper[4919]: I0930 20:15:38.631660 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:38 crc kubenswrapper[4919]: E0930 20:15:38.632789 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:15:38 crc kubenswrapper[4919]: I0930 20:15:38.631734 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:38 crc kubenswrapper[4919]: E0930 20:15:38.633034 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:15:38 crc kubenswrapper[4919]: I0930 20:15:38.631708 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:38 crc kubenswrapper[4919]: E0930 20:15:38.632849 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:38 crc kubenswrapper[4919]: E0930 20:15:38.633340 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:15:40 crc kubenswrapper[4919]: I0930 20:15:40.631562 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:40 crc kubenswrapper[4919]: I0930 20:15:40.631562 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:40 crc kubenswrapper[4919]: I0930 20:15:40.631563 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:40 crc kubenswrapper[4919]: I0930 20:15:40.631727 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:40 crc kubenswrapper[4919]: E0930 20:15:40.631963 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:40 crc kubenswrapper[4919]: E0930 20:15:40.632128 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:15:40 crc kubenswrapper[4919]: E0930 20:15:40.632322 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:15:40 crc kubenswrapper[4919]: E0930 20:15:40.632499 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:15:40 crc kubenswrapper[4919]: E0930 20:15:40.727771 4919 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 30 20:15:42 crc kubenswrapper[4919]: I0930 20:15:42.632022 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:42 crc kubenswrapper[4919]: I0930 20:15:42.632133 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:42 crc kubenswrapper[4919]: I0930 20:15:42.632180 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:42 crc kubenswrapper[4919]: I0930 20:15:42.632383 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:42 crc kubenswrapper[4919]: E0930 20:15:42.632375 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:15:42 crc kubenswrapper[4919]: I0930 20:15:42.632596 4919 scope.go:117] "RemoveContainer" containerID="aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb"
Sep 30 20:15:42 crc kubenswrapper[4919]: E0930 20:15:42.632565 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:15:42 crc kubenswrapper[4919]: E0930 20:15:42.632650 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:15:42 crc kubenswrapper[4919]: E0930 20:15:42.632773 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:43 crc kubenswrapper[4919]: I0930 20:15:43.420645 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/1.log"
Sep 30 20:15:43 crc kubenswrapper[4919]: I0930 20:15:43.421107 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c5crr" event={"ID":"e3e33a72-0a49-4944-a2c2-ac16183942cf","Type":"ContainerStarted","Data":"87b1b6f7c7b9294d3e1f2cf6de4cc7f91699916fcbc8ea3e63c60eeede5a5879"}
Sep 30 20:15:44 crc kubenswrapper[4919]: I0930 20:15:44.631344 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:44 crc kubenswrapper[4919]: I0930 20:15:44.631433 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:44 crc kubenswrapper[4919]: I0930 20:15:44.631401 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:44 crc kubenswrapper[4919]: I0930 20:15:44.631362 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:44 crc kubenswrapper[4919]: E0930 20:15:44.631596 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-bwpdf" podUID="c0624d31-70fc-4d66-a31b-4e67896ab40e"
Sep 30 20:15:44 crc kubenswrapper[4919]: E0930 20:15:44.631753 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 30 20:15:44 crc kubenswrapper[4919]: E0930 20:15:44.631884 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 30 20:15:44 crc kubenswrapper[4919]: E0930 20:15:44.631958 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.631791 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.631835 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.631978 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.632020 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.634715 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.635648 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.636903 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.637029 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.637271 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Sep 30 20:15:46 crc kubenswrapper[4919]: I0930 20:15:46.637826 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Sep 30 20:15:51 crc kubenswrapper[4919]: I0930 20:15:51.810162 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.572203 4919 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.630143 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xns2c"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.631208 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.633969 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.635045 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.635386 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.635417 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.635521 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.635983 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.637833 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.638624 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.643784 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.645186 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.645633 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.646501 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.647310 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.647611 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.647887 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.648248 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.648772 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.652790 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.653582 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.653727 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.653593 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.654006 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.654090 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.654279 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.648255 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5kls"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.654549 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.655157 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.655838 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.656955 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.660691 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-m9k8f"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.661111 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kngwc"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.661563 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.662182 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.663040 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-2d7ng"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.663937 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.664661 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2lzkb"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.680336 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.681282 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.685533 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.699423 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.699779 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.699896 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.700500 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.701134 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.701162 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.701504 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2d7ng"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.701798 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.701888 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.702415 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.702469 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.702738 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.703448 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-qxlpx"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.703933 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.704494 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.704632 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-qxlpx"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.704595 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2lzkb"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.704857 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.704539 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.705735 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.705744 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.707057 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.707340 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.713700 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.718232 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.719679 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.719726 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.719972 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.720163 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.720229 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.720398 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.720628 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.721070 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.721868 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.722434 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.723675 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.725034 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.725426 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.725684 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.726084 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.726383 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.727308 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.730691 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpbl8"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.731104 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fdmjq"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.731506 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.731722 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.733769 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.734094 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.734233 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.734346 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.735559 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.736029 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.736048 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5kls"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.736119 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.736793 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.736836 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.736876 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.736838 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.736799 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.737042 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.737089 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.737138 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.737266 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.737353 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.737526 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.737845 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.737879 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.738011 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.738111 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.738937 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.739046 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.749632 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.749918 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.750102 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.750979 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.751802 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.751907 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.752120 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.752640 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.755478 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.757430 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.758193 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.758347 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.758357 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.758912 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.759414 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.759558 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.759598 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.771617 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.772687 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.772880 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773017 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773110 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773255 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773401 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773624 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773710 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773847 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773868 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.773894 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gq4l8"]
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.774000 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.774076 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.774252 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.778955 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.779657 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.779729 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780092 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780120 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780142 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-encryption-config\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780169 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-serving-cert\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780184 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780199 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-etcd-client\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780235 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-client-ca\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780254 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f25ddd0d-0dc9-442d-a893-6562032c3b95-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780271 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-image-import-ca\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780285 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780300 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-dir\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780322 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zn5w\" (UniqueName: \"kubernetes.io/projected/42c8738a-aad8-4cc5-b18f-92eee2745673-kube-api-access-9zn5w\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780339 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780357 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7478385-0c3c-4f02-ab80-fc2258357773-auth-proxy-config\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780376 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780391 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780406 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7478385-0c3c-4f02-ab80-fc2258357773-config\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780420 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/98d93a4f-32ca-41db-9776-8bf3bad8727d-node-pullsecrets\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780438 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g94tn\" (UniqueName: \"kubernetes.io/projected/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-kube-api-access-g94tn\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780455 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-serving-cert\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780470 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/89c9a653-2d79-4af8-9ee0-04dd3058a692-audit-dir\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780486 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-encryption-config\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780501 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4rdz\" (UniqueName: \"kubernetes.io/projected/2fbcced3-31b3-462e-af0f-9d80537d7d55-kube-api-access-g4rdz\") pod \"cluster-samples-operator-665b6dd947-wq9f6\" (UID: \"2fbcced3-31b3-462e-af0f-9d80537d7d55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780516 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780533 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780611 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780632 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4599a59e-3533-494d-b149-f84b3033c62c-serving-cert\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780647 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx52g\" (UniqueName: \"kubernetes.io/projected/4599a59e-3533-494d-b149-f84b3033c62c-kube-api-access-xx52g\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780687 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-config\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780713 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-etcd-client\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780737 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780756 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m6mf\" (UniqueName: \"kubernetes.io/projected/e7478385-0c3c-4f02-ab80-fc2258357773-kube-api-access-8m6mf\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780805 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780830 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-serving-cert\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780885 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-etcd-serving-ca\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780910 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwg4w\" (UniqueName: \"kubernetes.io/projected/98d93a4f-32ca-41db-9776-8bf3bad8727d-kube-api-access-xwg4w\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780933 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-config\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.780960 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-config\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781210 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781327 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-audit\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781387 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-service-ca-bundle\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781429 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6v4n\" (UniqueName: \"kubernetes.io/projected/f25ddd0d-0dc9-442d-a893-6562032c3b95-kube-api-access-j6v4n\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781469 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-client-ca\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781516 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-policies\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781609 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started
for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f25ddd0d-0dc9-442d-a893-6562032c3b95-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781650 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-config\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781692 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-audit-policies\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781780 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781823 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781852 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2nv5\" (UniqueName: \"kubernetes.io/projected/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-kube-api-access-v2nv5\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781892 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2fbcced3-31b3-462e-af0f-9d80537d7d55-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wq9f6\" (UID: \"2fbcced3-31b3-462e-af0f-9d80537d7d55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.781927 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wchrk\" (UniqueName: \"kubernetes.io/projected/23df7f75-7b68-4810-92a4-b0e7e39f9bf4-kube-api-access-wchrk\") pod \"downloads-7954f5f757-2d7ng\" (UID: \"23df7f75-7b68-4810-92a4-b0e7e39f9bf4\") " pod="openshift-console/downloads-7954f5f757-2d7ng" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782001 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782037 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782055 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782582 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782627 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a68aedae-26b8-412b-b1af-383086a8e93f-serving-cert\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782657 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f25ddd0d-0dc9-442d-a893-6562032c3b95-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782679 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/98d93a4f-32ca-41db-9776-8bf3bad8727d-audit-dir\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782722 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782723 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8fz2\" (UniqueName: \"kubernetes.io/projected/a68aedae-26b8-412b-b1af-383086a8e93f-kube-api-access-n8fz2\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782923 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" 
(UniqueName: \"kubernetes.io/secret/e7478385-0c3c-4f02-ab80-fc2258357773-machine-approver-tls\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.782957 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rksk7\" (UniqueName: \"kubernetes.io/projected/89c9a653-2d79-4af8-9ee0-04dd3058a692-kube-api-access-rksk7\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.785716 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.785854 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.786199 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.786348 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.786894 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.787449 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.788166 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.791694 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.792597 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.792874 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-m9k8f"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.792900 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-prx6x"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.793381 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.793651 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.795415 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.795554 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xns2c"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.795587 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.804193 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-khpgg"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.810081 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.810710 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-c55sj"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.811305 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.811386 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.811826 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.811878 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-chftj"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.812051 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.812523 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.812529 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-chftj" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.816601 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.819532 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.821017 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.823814 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.824770 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.827657 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.829396 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.829561 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.831294 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.831624 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.834202 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-z2hvv"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.834394 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.855968 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.856199 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.860975 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.864901 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.866245 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.867364 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-k46x5"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.868564 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.869158 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kngwc"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.873542 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.873849 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.881079 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884340 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884685 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884713 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlpjx\" (UniqueName: \"kubernetes.io/projected/6da7486f-8911-4897-bf58-165a98baf2f8-kube-api-access-hlpjx\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884737 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a68aedae-26b8-412b-b1af-383086a8e93f-serving-cert\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884756 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f25ddd0d-0dc9-442d-a893-6562032c3b95-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884775 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61462d92-fbc3-462c-b847-7c5f5e8e457a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884792 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/98d93a4f-32ca-41db-9776-8bf3bad8727d-audit-dir\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884809 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8fz2\" (UniqueName: \"kubernetes.io/projected/a68aedae-26b8-412b-b1af-383086a8e93f-kube-api-access-n8fz2\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884825 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e7478385-0c3c-4f02-ab80-fc2258357773-machine-approver-tls\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884842 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rksk7\" (UniqueName: \"kubernetes.io/projected/89c9a653-2d79-4af8-9ee0-04dd3058a692-kube-api-access-rksk7\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884860 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884876 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884893 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3447472-d94d-4984-9b19-591fec8cc4b2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: \"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884911 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-config\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884928 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-serving-cert\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884942 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884957 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-etcd-client\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884971 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-encryption-config\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.884989 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6da7486f-8911-4897-bf58-165a98baf2f8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885012 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-client-ca\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885028 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f25ddd0d-0dc9-442d-a893-6562032c3b95-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885043 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-948hs\" (UniqueName: \"kubernetes.io/projected/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-kube-api-access-948hs\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885058 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-image-import-ca\") pod 
\"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885073 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885088 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-dir\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885104 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3447472-d94d-4984-9b19-591fec8cc4b2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: \"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885121 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6da7486f-8911-4897-bf58-165a98baf2f8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885138 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4psbg\" (UniqueName: \"kubernetes.io/projected/d858bbb5-c348-42d5-882f-03a21a91cbeb-kube-api-access-4psbg\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885163 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zn5w\" (UniqueName: \"kubernetes.io/projected/42c8738a-aad8-4cc5-b18f-92eee2745673-kube-api-access-9zn5w\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885180 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885197 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7478385-0c3c-4f02-ab80-fc2258357773-auth-proxy-config\") pod \"machine-approver-56656f9798-4phzk\" (UID: 
\"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885227 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885246 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885263 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7478385-0c3c-4f02-ab80-fc2258357773-config\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885281 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d858bbb5-c348-42d5-882f-03a21a91cbeb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885298 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/98d93a4f-32ca-41db-9776-8bf3bad8727d-node-pullsecrets\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885314 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g94tn\" (UniqueName: \"kubernetes.io/projected/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-kube-api-access-g94tn\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885333 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-serving-cert\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885349 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61462d92-fbc3-462c-b847-7c5f5e8e457a-trusted-ca\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885365 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-encryption-config\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885384 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4rdz\" (UniqueName: \"kubernetes.io/projected/2fbcced3-31b3-462e-af0f-9d80537d7d55-kube-api-access-g4rdz\") pod \"cluster-samples-operator-665b6dd947-wq9f6\" (UID: \"2fbcced3-31b3-462e-af0f-9d80537d7d55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885399 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885418 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/89c9a653-2d79-4af8-9ee0-04dd3058a692-audit-dir\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885453 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885469 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4599a59e-3533-494d-b149-f84b3033c62c-serving-cert\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885484 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx52g\" (UniqueName: \"kubernetes.io/projected/4599a59e-3533-494d-b149-f84b3033c62c-kube-api-access-xx52g\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885499 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885515 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-config\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885529 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61462d92-fbc3-462c-b847-7c5f5e8e457a-metrics-tls\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885546 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-etcd-client\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885561 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-serving-cert\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885579 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885596 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m6mf\" (UniqueName: \"kubernetes.io/projected/e7478385-0c3c-4f02-ab80-fc2258357773-kube-api-access-8m6mf\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885619 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885635 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-serving-cert\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885668 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-etcd-serving-ca\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885684 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwg4w\" (UniqueName: \"kubernetes.io/projected/98d93a4f-32ca-41db-9776-8bf3bad8727d-kube-api-access-xwg4w\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885700 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-trusted-ca\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885716 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885816 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-config\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885833 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-config\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885850 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-audit\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885867 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-service-ca-bundle\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885907 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6v4n\" (UniqueName: \"kubernetes.io/projected/f25ddd0d-0dc9-442d-a893-6562032c3b95-kube-api-access-j6v4n\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885931 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-client-ca\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885951 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3447472-d94d-4984-9b19-591fec8cc4b2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: \"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885971 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-policies\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.885998 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f25ddd0d-0dc9-442d-a893-6562032c3b95-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886015 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-config\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886030 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-audit-policies\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886059 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886075 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886098 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d858bbb5-c348-42d5-882f-03a21a91cbeb-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886115 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2nv5\" (UniqueName: \"kubernetes.io/projected/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-kube-api-access-v2nv5\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886131 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2fbcced3-31b3-462e-af0f-9d80537d7d55-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wq9f6\" (UID: \"2fbcced3-31b3-462e-af0f-9d80537d7d55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886230 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wchrk\" (UniqueName: \"kubernetes.io/projected/23df7f75-7b68-4810-92a4-b0e7e39f9bf4-kube-api-access-wchrk\") pod \"downloads-7954f5f757-2d7ng\" (UID: \"23df7f75-7b68-4810-92a4-b0e7e39f9bf4\") " pod="openshift-console/downloads-7954f5f757-2d7ng" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886247 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886262 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886278 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k8gt\" (UniqueName: \"kubernetes.io/projected/61462d92-fbc3-462c-b847-7c5f5e8e457a-kube-api-access-8k8gt\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886362 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.886620 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e7478385-0c3c-4f02-ab80-fc2258357773-auth-proxy-config\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.887091 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.887920 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.888457 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-policies\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.888493 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/98d93a4f-32ca-41db-9776-8bf3bad8727d-audit-dir\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.889472 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-config\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.890662 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-etcd-serving-ca\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.891200 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a68aedae-26b8-412b-b1af-383086a8e93f-serving-cert\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.891249 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.891454 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd"] Sep 30 20:15:54 crc 
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.891781 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-etcd-client\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.892333 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4599a59e-3533-494d-b149-f84b3033c62c-serving-cert\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.892663 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-client-ca\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.892721 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-client-ca\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.892877 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-audit\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.892881 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-serving-cert\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.893333 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.893381 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.893409 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.893669 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-dir\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.893738 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-service-ca-bundle\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.894111 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/89c9a653-2d79-4af8-9ee0-04dd3058a692-audit-dir\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.894171 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/98d93a4f-32ca-41db-9776-8bf3bad8727d-node-pullsecrets\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.894408 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-image-import-ca\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.895023 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-config\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.895550 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.895733 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/f25ddd0d-0dc9-442d-a893-6562032c3b95-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.895966 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.896082 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-config\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.896309 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.897086 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.897112 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a68aedae-26b8-412b-b1af-383086a8e93f-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.897341 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7478385-0c3c-4f02-ab80-fc2258357773-config\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.897737 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-config\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.897814 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/98d93a4f-32ca-41db-9776-8bf3bad8727d-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.898081 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"
\"kubernetes.io/configmap/89c9a653-2d79-4af8-9ee0-04dd3058a692-audit-policies\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.898404 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f25ddd0d-0dc9-442d-a893-6562032c3b95-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.898439 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-encryption-config\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.898590 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.898614 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/2fbcced3-31b3-462e-af0f-9d80537d7d55-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wq9f6\" (UID: \"2fbcced3-31b3-462e-af0f-9d80537d7d55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.899037 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/e7478385-0c3c-4f02-ab80-fc2258357773-machine-approver-tls\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.899399 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.899684 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-encryption-config\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.899714 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-serving-cert\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.900509 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.900801 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.900843 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2d7ng"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.901788 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpbl8"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.902398 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.902433 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.903034 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89c9a653-2d79-4af8-9ee0-04dd3058a692-serving-cert\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.903471 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.904450 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2lzkb"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.905374 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.906313 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gq4l8"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.907235 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.909146 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.910096 4919 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fdmjq"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.911115 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-qxlpx"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.912059 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-68tmn"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.912946 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-68tmn" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.913005 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-p5sdz"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.913396 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.913552 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/98d93a4f-32ca-41db-9776-8bf3bad8727d-etcd-client\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.913617 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-p5sdz" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.914025 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.914371 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-prx6x"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.915368 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.916419 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.917394 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.918886 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-khpgg"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.919332 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.920331 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.921249 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.922254 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-k46x5"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.923319 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-68tmn"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.924153 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-chftj"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.925122 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.926144 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.927308 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-z2hvv"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.928411 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-hj49j"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.929331 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-hj49j" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.929929 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-hj49j"] Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.932573 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.953391 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.974017 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987179 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61462d92-fbc3-462c-b847-7c5f5e8e457a-trusted-ca\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987364 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61462d92-fbc3-462c-b847-7c5f5e8e457a-metrics-tls\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987442 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-serving-cert\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987545 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-trusted-ca\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987644 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3447472-d94d-4984-9b19-591fec8cc4b2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: \"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987734 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d858bbb5-c348-42d5-882f-03a21a91cbeb-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987833 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k8gt\" (UniqueName: \"kubernetes.io/projected/61462d92-fbc3-462c-b847-7c5f5e8e457a-kube-api-access-8k8gt\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987911 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlpjx\" (UniqueName: \"kubernetes.io/projected/6da7486f-8911-4897-bf58-165a98baf2f8-kube-api-access-hlpjx\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.987993 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61462d92-fbc3-462c-b847-7c5f5e8e457a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988099 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3447472-d94d-4984-9b19-591fec8cc4b2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: \"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988176 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-config\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988283 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6da7486f-8911-4897-bf58-165a98baf2f8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988370 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-948hs\" (UniqueName: \"kubernetes.io/projected/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-kube-api-access-948hs\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988441 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3447472-d94d-4984-9b19-591fec8cc4b2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: \"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988508 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6da7486f-8911-4897-bf58-165a98baf2f8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988585 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4psbg\" (UniqueName: \"kubernetes.io/projected/d858bbb5-c348-42d5-882f-03a21a91cbeb-kube-api-access-4psbg\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988680 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d858bbb5-c348-42d5-882f-03a21a91cbeb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.988913 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-trusted-ca\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.989005 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6da7486f-8911-4897-bf58-165a98baf2f8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.989348 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d858bbb5-c348-42d5-882f-03a21a91cbeb-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.990113 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-config\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.991881 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6da7486f-8911-4897-bf58-165a98baf2f8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.992268 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-serving-cert\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:54 crc kubenswrapper[4919]: I0930 20:15:54.992778 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.013518 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.034250 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.044862 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d858bbb5-c348-42d5-882f-03a21a91cbeb-serving-cert\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.054496 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.073433 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.094290 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.133825 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.153961 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Sep 30 20:15:55 crc 
kubenswrapper[4919]: I0930 20:15:55.175308 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.194806 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.214172 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.234957 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.253658 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.273713 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.293237 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.302585 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/61462d92-fbc3-462c-b847-7c5f5e8e457a-metrics-tls\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.314380 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.344620 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.349019 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/61462d92-fbc3-462c-b847-7c5f5e8e457a-trusted-ca\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.354514 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.374251 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.394493 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.413779 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.425292 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d3447472-d94d-4984-9b19-591fec8cc4b2-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: 
\"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.435365 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.439063 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3447472-d94d-4984-9b19-591fec8cc4b2-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: \"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.454395 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.474482 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.493547 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.514435 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.533564 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.554578 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.573830 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.594049 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.614709 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.634498 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.654321 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.674408 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.694613 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.733433 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.753525 4919 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.773621 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.807817 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.812405 4919 request.go:700] Waited for 1.000587451s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.814966 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.833561 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.854528 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.873850 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.894926 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.913708 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.934447 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.953894 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.973864 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Sep 30 20:15:55 crc kubenswrapper[4919]: I0930 20:15:55.993925 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.015013 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.033601 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.055065 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.073401 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.095111 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 
30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.113851 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.134130 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.153857 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.174764 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.193881 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.214746 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.234525 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.253679 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.273770 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.294089 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.313669 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.335556 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.354877 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.374382 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.395108 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.414054 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.433404 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.454305 4919 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.474691 4919 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.530532 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rksk7\" (UniqueName: \"kubernetes.io/projected/89c9a653-2d79-4af8-9ee0-04dd3058a692-kube-api-access-rksk7\") pod \"apiserver-7bbb656c7d-pmpcw\" (UID: \"89c9a653-2d79-4af8-9ee0-04dd3058a692\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.542364 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f25ddd0d-0dc9-442d-a893-6562032c3b95-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.563433 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8fz2\" (UniqueName: \"kubernetes.io/projected/a68aedae-26b8-412b-b1af-383086a8e93f-kube-api-access-n8fz2\") pod \"authentication-operator-69f744f599-kngwc\" (UID: \"a68aedae-26b8-412b-b1af-383086a8e93f\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.580641 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx52g\" (UniqueName: \"kubernetes.io/projected/4599a59e-3533-494d-b149-f84b3033c62c-kube-api-access-xx52g\") pod \"controller-manager-879f6c89f-m9k8f\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.602042 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwg4w\" (UniqueName: \"kubernetes.io/projected/98d93a4f-32ca-41db-9776-8bf3bad8727d-kube-api-access-xwg4w\") pod \"apiserver-76f77b778f-xns2c\" (UID: \"98d93a4f-32ca-41db-9776-8bf3bad8727d\") " pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.605660 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.621384 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.624041 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6v4n\" (UniqueName: \"kubernetes.io/projected/f25ddd0d-0dc9-442d-a893-6562032c3b95-kube-api-access-j6v4n\") pod \"cluster-image-registry-operator-dc59b4c8b-j9pft\" (UID: \"f25ddd0d-0dc9-442d-a893-6562032c3b95\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.641004 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2nv5\" (UniqueName: \"kubernetes.io/projected/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-kube-api-access-v2nv5\") pod \"route-controller-manager-6576b87f9c-8brdq\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.660010 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wchrk\" (UniqueName: \"kubernetes.io/projected/23df7f75-7b68-4810-92a4-b0e7e39f9bf4-kube-api-access-wchrk\") pod \"downloads-7954f5f757-2d7ng\" (UID: \"23df7f75-7b68-4810-92a4-b0e7e39f9bf4\") " pod="openshift-console/downloads-7954f5f757-2d7ng" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.671933 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.674330 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.682283 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g94tn\" (UniqueName: \"kubernetes.io/projected/cd4a5780-a1bb-4918-b54e-afd17c1dd9e1-kube-api-access-g94tn\") pod \"openshift-apiserver-operator-796bbdcf4f-6mx92\" (UID: \"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.691199 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zn5w\" (UniqueName: \"kubernetes.io/projected/42c8738a-aad8-4cc5-b18f-92eee2745673-kube-api-access-9zn5w\") pod \"oauth-openshift-558db77b4-s5kls\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.714307 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m6mf\" (UniqueName: \"kubernetes.io/projected/e7478385-0c3c-4f02-ab80-fc2258357773-kube-api-access-8m6mf\") pod \"machine-approver-56656f9798-4phzk\" (UID: \"e7478385-0c3c-4f02-ab80-fc2258357773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.744676 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.749438 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4rdz\" (UniqueName: 
\"kubernetes.io/projected/2fbcced3-31b3-462e-af0f-9d80537d7d55-kube-api-access-g4rdz\") pod \"cluster-samples-operator-665b6dd947-wq9f6\" (UID: \"2fbcced3-31b3-462e-af0f-9d80537d7d55\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.753930 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.765525 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xns2c" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.776857 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.796907 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.806325 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.812862 4919 request.go:700] Waited for 1.899033037s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-tls&limit=500&resourceVersion=0 Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.819936 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.821419 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.838059 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.839175 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.853537 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.869904 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kngwc"] Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.874459 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.887990 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.892952 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.911631 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw"] Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.913725 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.916610 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" Sep 30 20:15:56 crc kubenswrapper[4919]: W0930 20:15:56.927606 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89c9a653_2d79_4af8_9ee0_04dd3058a692.slice/crio-e28ffde3c9f0a99afc398117fa2b4762cbdf370b5969b4db1b55e5cf6e5ce069 WatchSource:0}: Error finding container e28ffde3c9f0a99afc398117fa2b4762cbdf370b5969b4db1b55e5cf6e5ce069: Status 404 returned error can't find the container with id e28ffde3c9f0a99afc398117fa2b4762cbdf370b5969b4db1b55e5cf6e5ce069 Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.928992 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2d7ng" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.950724 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k8gt\" (UniqueName: \"kubernetes.io/projected/61462d92-fbc3-462c-b847-7c5f5e8e457a-kube-api-access-8k8gt\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.967977 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlpjx\" (UniqueName: \"kubernetes.io/projected/6da7486f-8911-4897-bf58-165a98baf2f8-kube-api-access-hlpjx\") pod \"kube-storage-version-migrator-operator-b67b599dd-mp48h\" (UID: \"6da7486f-8911-4897-bf58-165a98baf2f8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.980190 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" Sep 30 20:15:56 crc kubenswrapper[4919]: I0930 20:15:56.987947 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/61462d92-fbc3-462c-b847-7c5f5e8e457a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-nmz9x\" (UID: \"61462d92-fbc3-462c-b847-7c5f5e8e457a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.011640 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d3447472-d94d-4984-9b19-591fec8cc4b2-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-jjkdp\" (UID: \"d3447472-d94d-4984-9b19-591fec8cc4b2\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.032693 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-948hs\" (UniqueName: \"kubernetes.io/projected/cdba9d13-b3b5-4a75-adf1-9b14ac993af1-kube-api-access-948hs\") pod \"console-operator-58897d9998-2lzkb\" (UID: \"cdba9d13-b3b5-4a75-adf1-9b14ac993af1\") " pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.051820 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4psbg\" (UniqueName: \"kubernetes.io/projected/d858bbb5-c348-42d5-882f-03a21a91cbeb-kube-api-access-4psbg\") pod \"openshift-config-operator-7777fb866f-bvpwb\" (UID: \"d858bbb5-c348-42d5-882f-03a21a91cbeb\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.079726 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.118190 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.118751 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-oauth-config\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.118776 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f34b8c46-00c7-483c-b446-67990101e057-images\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.118793 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-trusted-ca\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121390 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121448 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-service-ca\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121465 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c5ed7546-9652-448f-ac46-f4325cd00b24-proxy-tls\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121534 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-tls\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" 
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121597 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27jn7\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-kube-api-access-27jn7\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121617 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed3f144-f330-47b6-b73b-5b079ba9f89d-config\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121682 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44xq9\" (UniqueName: \"kubernetes.io/projected/23070aa6-f355-494e-b108-a3fba285cd2c-kube-api-access-44xq9\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121719 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5s629\" (UniqueName: \"kubernetes.io/projected/bbebe3c4-0f6b-4779-8a37-5c716c90c409-kube-api-access-5s629\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121768 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g7nf\" (UniqueName: \"kubernetes.io/projected/e19e9e5b-3b2f-41ab-943c-e114f7613991-kube-api-access-7g7nf\") pod \"dns-operator-744455d44c-gq4l8\" (UID: \"e19e9e5b-3b2f-41ab-943c-e114f7613991\") " pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121785 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkt27\" (UniqueName: \"kubernetes.io/projected/1724db9f-c072-42c6-a26b-5953c9656668-kube-api-access-mkt27\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: \"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121827 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-trusted-ca-bundle\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121857 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6be387e2-3aff-43e1-91bc-bc8257764da1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc 
kubenswrapper[4919]: I0930 20:15:57.121875 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f34b8c46-00c7-483c-b446-67990101e057-proxy-tls\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121891 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e19e9e5b-3b2f-41ab-943c-e114f7613991-metrics-tls\") pod \"dns-operator-744455d44c-gq4l8\" (UID: \"e19e9e5b-3b2f-41ab-943c-e114f7613991\") " pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121907 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhskm\" (UniqueName: \"kubernetes.io/projected/fc156064-2b1c-47c2-b91c-b7318dacb213-kube-api-access-rhskm\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121937 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-service-ca\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.121971 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc156064-2b1c-47c2-b91c-b7318dacb213-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.122304 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f34b8c46-00c7-483c-b446-67990101e057-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.122321 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.122434 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc156064-2b1c-47c2-b91c-b7318dacb213-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.122717 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-serving-cert\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.122748 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-config\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123014 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6be387e2-3aff-43e1-91bc-bc8257764da1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123043 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c5ed7546-9652-448f-ac46-f4325cd00b24-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123066 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-ca\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123091 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aed3f144-f330-47b6-b73b-5b079ba9f89d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123129 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1724db9f-c072-42c6-a26b-5953c9656668-profile-collector-cert\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: 
\"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123144 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww9kh\" (UniqueName: \"kubernetes.io/projected/c5ed7546-9652-448f-ac46-f4325cd00b24-kube-api-access-ww9kh\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123556 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-client\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123587 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-626b2\" (UniqueName: \"kubernetes.io/projected/1c528181-4537-450e-b8b8-23b70b25a9c8-kube-api-access-626b2\") pod \"migrator-59844c95c7-4h84d\" (UID: \"1c528181-4537-450e-b8b8-23b70b25a9c8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123650 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123669 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbebe3c4-0f6b-4779-8a37-5c716c90c409-serving-cert\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123698 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1724db9f-c072-42c6-a26b-5953c9656668-srv-cert\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: \"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123731 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aed3f144-f330-47b6-b73b-5b079ba9f89d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123813 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-config\") pod \"console-f9d7485db-qxlpx\" (UID: 
\"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.123922 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b4fv\" (UniqueName: \"kubernetes.io/projected/f34b8c46-00c7-483c-b446-67990101e057-kube-api-access-4b4fv\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.124020 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:57.624007087 +0000 UTC m=+142.740040214 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.124013 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-config\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.124060 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-oauth-serving-cert\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.124081 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vghd7\" (UniqueName: \"kubernetes.io/projected/2d053914-edeb-49d0-bffa-b6d63885a5fb-kube-api-access-vghd7\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.124101 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glfck\" (UniqueName: \"kubernetes.io/projected/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-kube-api-access-glfck\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.124138 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-certificates\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc 
kubenswrapper[4919]: I0930 20:15:57.124178 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.124200 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-bound-sa-token\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.124236 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-images\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.130079 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.222311 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-m9k8f"] Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.225420 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226270 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-service-ca\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226298 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c019c27b-0131-498d-b84a-6c79511d176e-service-ca-bundle\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226316 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brx5c\" (UniqueName: \"kubernetes.io/projected/0a9f6d48-6413-4624-9598-615b4f16382f-kube-api-access-brx5c\") pod \"ingress-canary-68tmn\" (UID: \"0a9f6d48-6413-4624-9598-615b4f16382f\") " pod="openshift-ingress-canary/ingress-canary-68tmn" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226334 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" 
(UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-registration-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226361 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc156064-2b1c-47c2-b91c-b7318dacb213-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226377 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-metrics-certs\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226404 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f34b8c46-00c7-483c-b446-67990101e057-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226422 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3abeda0a-0453-46a4-b73e-eccafb442e4d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: \"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226455 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc156064-2b1c-47c2-b91c-b7318dacb213-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226479 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-serving-cert\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226496 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-config\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226511 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h27cw\" (UniqueName: \"kubernetes.io/projected/c019c27b-0131-498d-b84a-6c79511d176e-kube-api-access-h27cw\") pod 
\"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226545 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6be387e2-3aff-43e1-91bc-bc8257764da1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226561 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c5ed7546-9652-448f-ac46-f4325cd00b24-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226575 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-ca\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226588 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-csi-data-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226606 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aed3f144-f330-47b6-b73b-5b079ba9f89d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226631 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1724db9f-c072-42c6-a26b-5953c9656668-profile-collector-cert\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: \"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226648 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww9kh\" (UniqueName: \"kubernetes.io/projected/c5ed7546-9652-448f-ac46-f4325cd00b24-kube-api-access-ww9kh\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226665 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-apiservice-cert\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226716 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a9f6d48-6413-4624-9598-615b4f16382f-cert\") pod \"ingress-canary-68tmn\" (UID: \"0a9f6d48-6413-4624-9598-615b4f16382f\") " pod="openshift-ingress-canary/ingress-canary-68tmn" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226752 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qlvb\" (UniqueName: \"kubernetes.io/projected/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-kube-api-access-8qlvb\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226819 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/31ef9900-22cb-4eb9-ab61-5b378f168126-node-bootstrap-token\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226836 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/fec5f277-cd5b-43e4-a996-890b4e736f42-signing-key\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226851 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-client\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226868 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0dda3300-8f42-4f22-b2f0-7a5235e607ef-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-z2hvv\" (UID: \"0dda3300-8f42-4f22-b2f0-7a5235e607ef\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226893 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqh65\" (UniqueName: \"kubernetes.io/projected/764e2fc0-f6af-45a8-8a90-f78ce95abf62-kube-api-access-qqh65\") pod \"control-plane-machine-set-operator-78cbb6b69f-lfnjg\" (UID: \"764e2fc0-f6af-45a8-8a90-f78ce95abf62\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226910 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-626b2\" (UniqueName: \"kubernetes.io/projected/1c528181-4537-450e-b8b8-23b70b25a9c8-kube-api-access-626b2\") pod \"migrator-59844c95c7-4h84d\" (UID: \"1c528181-4537-450e-b8b8-23b70b25a9c8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 
20:15:57.226936 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbebe3c4-0f6b-4779-8a37-5c716c90c409-serving-cert\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226952 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8ptm\" (UniqueName: \"kubernetes.io/projected/31ef9900-22cb-4eb9-ab61-5b378f168126-kube-api-access-j8ptm\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.226987 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1724db9f-c072-42c6-a26b-5953c9656668-srv-cert\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: \"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227002 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-socket-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227018 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/fec5f277-cd5b-43e4-a996-890b4e736f42-signing-cabundle\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227046 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aed3f144-f330-47b6-b73b-5b079ba9f89d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227062 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2crdl\" (UniqueName: \"kubernetes.io/projected/65daa38d-8652-4438-af0e-5afc3524e5d4-kube-api-access-2crdl\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227080 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-config\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227095 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b4fv\" (UniqueName: 
\"kubernetes.io/projected/f34b8c46-00c7-483c-b446-67990101e057-kube-api-access-4b4fv\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227111 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-config-volume\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227127 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-default-certificate\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227145 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65daa38d-8652-4438-af0e-5afc3524e5d4-secret-volume\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227164 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-config\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227196 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckckw\" (UniqueName: \"kubernetes.io/projected/8e136a30-1aed-41b9-a85e-c89fa3811e25-kube-api-access-ckckw\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227239 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f75f4e78-ba83-436e-9f99-6156d6d065f4-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2b2p5\" (UID: \"f75f4e78-ba83-436e-9f99-6156d6d065f4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227257 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-tmpfs\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227271 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8e136a30-1aed-41b9-a85e-c89fa3811e25-serving-cert\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227316 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-oauth-serving-cert\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227332 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/31ef9900-22cb-4eb9-ab61-5b378f168126-certs\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227346 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-metrics-tls\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227360 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65daa38d-8652-4438-af0e-5afc3524e5d4-config-volume\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227376 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-certificates\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227393 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vghd7\" (UniqueName: \"kubernetes.io/projected/2d053914-edeb-49d0-bffa-b6d63885a5fb-kube-api-access-vghd7\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227408 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glfck\" (UniqueName: \"kubernetes.io/projected/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-kube-api-access-glfck\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227440 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-bound-sa-token\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 
crc kubenswrapper[4919]: I0930 20:15:57.227458 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227493 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-images\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227510 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-plugins-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227525 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-profile-collector-cert\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227540 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-mountpoint-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227561 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3abeda0a-0453-46a4-b73e-eccafb442e4d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: \"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227577 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227602 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3abeda0a-0453-46a4-b73e-eccafb442e4d-config\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: \"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227620 4919 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-trusted-ca\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227636 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-oauth-config\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227652 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f34b8c46-00c7-483c-b446-67990101e057-images\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227667 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpk4s\" (UniqueName: \"kubernetes.io/projected/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-kube-api-access-mpk4s\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227684 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227699 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-service-ca\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227715 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c5ed7546-9652-448f-ac46-f4325cd00b24-proxy-tls\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227739 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-tls\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227757 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/764e2fc0-f6af-45a8-8a90-f78ce95abf62-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-lfnjg\" (UID: 
\"764e2fc0-f6af-45a8-8a90-f78ce95abf62\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227777 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27jn7\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-kube-api-access-27jn7\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227792 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf7sx\" (UniqueName: \"kubernetes.io/projected/0dda3300-8f42-4f22-b2f0-7a5235e607ef-kube-api-access-tf7sx\") pod \"multus-admission-controller-857f4d67dd-z2hvv\" (UID: \"0dda3300-8f42-4f22-b2f0-7a5235e607ef\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227818 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed3f144-f330-47b6-b73b-5b079ba9f89d-config\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227836 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44xq9\" (UniqueName: \"kubernetes.io/projected/23070aa6-f355-494e-b108-a3fba285cd2c-kube-api-access-44xq9\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227853 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jblpc\" (UniqueName: \"kubernetes.io/projected/f75f4e78-ba83-436e-9f99-6156d6d065f4-kube-api-access-jblpc\") pod \"package-server-manager-789f6589d5-2b2p5\" (UID: \"f75f4e78-ba83-436e-9f99-6156d6d065f4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227896 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5s629\" (UniqueName: \"kubernetes.io/projected/bbebe3c4-0f6b-4779-8a37-5c716c90c409-kube-api-access-5s629\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.227915 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdk2b\" (UniqueName: \"kubernetes.io/projected/fec5f277-cd5b-43e4-a996-890b4e736f42-kube-api-access-bdk2b\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228613 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-webhook-cert\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: 
\"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228649 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g7nf\" (UniqueName: \"kubernetes.io/projected/e19e9e5b-3b2f-41ab-943c-e114f7613991-kube-api-access-7g7nf\") pod \"dns-operator-744455d44c-gq4l8\" (UID: \"e19e9e5b-3b2f-41ab-943c-e114f7613991\") " pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228666 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkt27\" (UniqueName: \"kubernetes.io/projected/1724db9f-c072-42c6-a26b-5953c9656668-kube-api-access-mkt27\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: \"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228701 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-srv-cert\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228746 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6cpv\" (UniqueName: \"kubernetes.io/projected/a25ce588-b65d-4541-ba64-7a4219330a33-kube-api-access-b6cpv\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228764 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-trusted-ca-bundle\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228791 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e136a30-1aed-41b9-a85e-c89fa3811e25-config\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228808 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfnmz\" (UniqueName: \"kubernetes.io/projected/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-kube-api-access-gfnmz\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228843 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6be387e2-3aff-43e1-91bc-bc8257764da1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: 
I0930 20:15:57.228860 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f34b8c46-00c7-483c-b446-67990101e057-proxy-tls\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228877 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e19e9e5b-3b2f-41ab-943c-e114f7613991-metrics-tls\") pod \"dns-operator-744455d44c-gq4l8\" (UID: \"e19e9e5b-3b2f-41ab-943c-e114f7613991\") " pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228894 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhskm\" (UniqueName: \"kubernetes.io/projected/fc156064-2b1c-47c2-b91c-b7318dacb213-kube-api-access-rhskm\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.228910 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-stats-auth\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.231033 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:57.731016002 +0000 UTC m=+142.847049129 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.234229 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-service-ca\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.235000 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft"] Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.235009 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc156064-2b1c-47c2-b91c-b7318dacb213-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.235844 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-oauth-serving-cert\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.235935 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f34b8c46-00c7-483c-b446-67990101e057-auth-proxy-config\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.236973 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-certificates\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.237860 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aed3f144-f330-47b6-b73b-5b079ba9f89d-config\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.238701 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-images\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.239122 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-config\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.240097 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-config\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.240612 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.240675 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-config\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.241923 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f34b8c46-00c7-483c-b446-67990101e057-images\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.241962 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-trusted-ca-bundle\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.242707 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-service-ca\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.244113 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-ca\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.244418 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6be387e2-3aff-43e1-91bc-bc8257764da1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.245050 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c5ed7546-9652-448f-ac46-f4325cd00b24-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.249936 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6be387e2-3aff-43e1-91bc-bc8257764da1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.251839 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-oauth-config\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.253069 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c5ed7546-9652-448f-ac46-f4325cd00b24-proxy-tls\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.254856 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5kls"] Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.255722 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-tls\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.255765 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-serving-cert\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.256017 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc156064-2b1c-47c2-b91c-b7318dacb213-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.256078 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f34b8c46-00c7-483c-b446-67990101e057-proxy-tls\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.256248 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1724db9f-c072-42c6-a26b-5953c9656668-profile-collector-cert\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: \"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.256564 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aed3f144-f330-47b6-b73b-5b079ba9f89d-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.256864 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.257145 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-trusted-ca\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.258061 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/bbebe3c4-0f6b-4779-8a37-5c716c90c409-etcd-client\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.258128 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1724db9f-c072-42c6-a26b-5953c9656668-srv-cert\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: \"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.258138 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.258517 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bbebe3c4-0f6b-4779-8a37-5c716c90c409-serving-cert\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.275648 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/e19e9e5b-3b2f-41ab-943c-e114f7613991-metrics-tls\") pod \"dns-operator-744455d44c-gq4l8\" (UID: \"e19e9e5b-3b2f-41ab-943c-e114f7613991\") " pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.282430 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vghd7\" (UniqueName: \"kubernetes.io/projected/2d053914-edeb-49d0-bffa-b6d63885a5fb-kube-api-access-vghd7\") pod \"console-f9d7485db-qxlpx\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.291678 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.303726 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glfck\" (UniqueName: \"kubernetes.io/projected/f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56-kube-api-access-glfck\") pod \"machine-api-operator-5694c8668f-fdmjq\" (UID: \"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.307993 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-bound-sa-token\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329585 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/fec5f277-cd5b-43e4-a996-890b4e736f42-signing-key\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329622 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqh65\" (UniqueName: \"kubernetes.io/projected/764e2fc0-f6af-45a8-8a90-f78ce95abf62-kube-api-access-qqh65\") pod \"control-plane-machine-set-operator-78cbb6b69f-lfnjg\" (UID: \"764e2fc0-f6af-45a8-8a90-f78ce95abf62\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329643 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0dda3300-8f42-4f22-b2f0-7a5235e607ef-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-z2hvv\" (UID: \"0dda3300-8f42-4f22-b2f0-7a5235e607ef\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329669 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329686 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8ptm\" (UniqueName: 
\"kubernetes.io/projected/31ef9900-22cb-4eb9-ab61-5b378f168126-kube-api-access-j8ptm\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329702 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-socket-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329716 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/fec5f277-cd5b-43e4-a996-890b4e736f42-signing-cabundle\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329731 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2crdl\" (UniqueName: \"kubernetes.io/projected/65daa38d-8652-4438-af0e-5afc3524e5d4-kube-api-access-2crdl\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329756 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-config-volume\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329770 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-default-certificate\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329787 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65daa38d-8652-4438-af0e-5afc3524e5d4-secret-volume\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329812 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e136a30-1aed-41b9-a85e-c89fa3811e25-serving-cert\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329828 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckckw\" (UniqueName: \"kubernetes.io/projected/8e136a30-1aed-41b9-a85e-c89fa3811e25-kube-api-access-ckckw\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" Sep 30 20:15:57 crc 
kubenswrapper[4919]: I0930 20:15:57.329844 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f75f4e78-ba83-436e-9f99-6156d6d065f4-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2b2p5\" (UID: \"f75f4e78-ba83-436e-9f99-6156d6d065f4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329861 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-tmpfs\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329878 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/31ef9900-22cb-4eb9-ab61-5b378f168126-certs\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329891 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-metrics-tls\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329905 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65daa38d-8652-4438-af0e-5afc3524e5d4-config-volume\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329922 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-plugins-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329939 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-profile-collector-cert\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329954 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-mountpoint-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329970 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3abeda0a-0453-46a4-b73e-eccafb442e4d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: 
\"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.329986 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3abeda0a-0453-46a4-b73e-eccafb442e4d-config\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: \"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330010 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpk4s\" (UniqueName: \"kubernetes.io/projected/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-kube-api-access-mpk4s\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330036 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/764e2fc0-f6af-45a8-8a90-f78ce95abf62-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-lfnjg\" (UID: \"764e2fc0-f6af-45a8-8a90-f78ce95abf62\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330059 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf7sx\" (UniqueName: \"kubernetes.io/projected/0dda3300-8f42-4f22-b2f0-7a5235e607ef-kube-api-access-tf7sx\") pod \"multus-admission-controller-857f4d67dd-z2hvv\" (UID: \"0dda3300-8f42-4f22-b2f0-7a5235e607ef\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330082 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jblpc\" (UniqueName: \"kubernetes.io/projected/f75f4e78-ba83-436e-9f99-6156d6d065f4-kube-api-access-jblpc\") pod \"package-server-manager-789f6589d5-2b2p5\" (UID: \"f75f4e78-ba83-436e-9f99-6156d6d065f4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330111 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdk2b\" (UniqueName: \"kubernetes.io/projected/fec5f277-cd5b-43e4-a996-890b4e736f42-kube-api-access-bdk2b\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330126 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-webhook-cert\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330151 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-srv-cert\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 
30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330177 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6cpv\" (UniqueName: \"kubernetes.io/projected/a25ce588-b65d-4541-ba64-7a4219330a33-kube-api-access-b6cpv\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330193 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e136a30-1aed-41b9-a85e-c89fa3811e25-config\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330225 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfnmz\" (UniqueName: \"kubernetes.io/projected/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-kube-api-access-gfnmz\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330254 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-stats-auth\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330270 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c019c27b-0131-498d-b84a-6c79511d176e-service-ca-bundle\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330287 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brx5c\" (UniqueName: \"kubernetes.io/projected/0a9f6d48-6413-4624-9598-615b4f16382f-kube-api-access-brx5c\") pod \"ingress-canary-68tmn\" (UID: \"0a9f6d48-6413-4624-9598-615b4f16382f\") " pod="openshift-ingress-canary/ingress-canary-68tmn" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330302 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-registration-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330318 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-metrics-certs\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330335 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3abeda0a-0453-46a4-b73e-eccafb442e4d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: 
\"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330352 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h27cw\" (UniqueName: \"kubernetes.io/projected/c019c27b-0131-498d-b84a-6c79511d176e-kube-api-access-h27cw\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330368 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-csi-data-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330389 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-apiservice-cert\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330403 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a9f6d48-6413-4624-9598-615b4f16382f-cert\") pod \"ingress-canary-68tmn\" (UID: \"0a9f6d48-6413-4624-9598-615b4f16382f\") " pod="openshift-ingress-canary/ingress-canary-68tmn" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330418 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qlvb\" (UniqueName: \"kubernetes.io/projected/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-kube-api-access-8qlvb\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.330445 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/31ef9900-22cb-4eb9-ab61-5b378f168126-node-bootstrap-token\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.333541 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/31ef9900-22cb-4eb9-ab61-5b378f168126-node-bootstrap-token\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.336761 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3abeda0a-0453-46a4-b73e-eccafb442e4d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: \"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.338645 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/3abeda0a-0453-46a4-b73e-eccafb442e4d-config\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: \"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.341529 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-tmpfs\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.342087 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/fec5f277-cd5b-43e4-a996-890b4e736f42-signing-cabundle\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.343144 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-config-volume\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.344629 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c019c27b-0131-498d-b84a-6c79511d176e-service-ca-bundle\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.344842 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-registration-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.345632 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-plugins-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.345865 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:57.845846533 +0000 UTC m=+142.961879750 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.346111 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-csi-data-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.346354 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-socket-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.346655 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/a25ce588-b65d-4541-ba64-7a4219330a33-mountpoint-dir\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.347193 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e136a30-1aed-41b9-a85e-c89fa3811e25-config\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.341529 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-626b2\" (UniqueName: \"kubernetes.io/projected/1c528181-4537-450e-b8b8-23b70b25a9c8-kube-api-access-626b2\") pod \"migrator-59844c95c7-4h84d\" (UID: \"1c528181-4537-450e-b8b8-23b70b25a9c8\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d" Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.347451 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2lzkb"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.351021 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65daa38d-8652-4438-af0e-5afc3524e5d4-config-volume\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.351509 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/fec5f277-cd5b-43e4-a996-890b4e736f42-signing-key\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.357155 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xns2c"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.357980 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0a9f6d48-6413-4624-9598-615b4f16382f-cert\") pod \"ingress-canary-68tmn\" (UID: \"0a9f6d48-6413-4624-9598-615b4f16382f\") " pod="openshift-ingress-canary/ingress-canary-68tmn"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.358815 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f75f4e78-ba83-436e-9f99-6156d6d065f4-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-2b2p5\" (UID: \"f75f4e78-ba83-436e-9f99-6156d6d065f4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.359912 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e136a30-1aed-41b9-a85e-c89fa3811e25-serving-cert\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.363204 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0dda3300-8f42-4f22-b2f0-7a5235e607ef-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-z2hvv\" (UID: \"0dda3300-8f42-4f22-b2f0-7a5235e607ef\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.363657 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-metrics-tls\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.363666 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44xq9\" (UniqueName: \"kubernetes.io/projected/23070aa6-f355-494e-b108-a3fba285cd2c-kube-api-access-44xq9\") pod \"marketplace-operator-79b997595-khpgg\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") " pod="openshift-marketplace/marketplace-operator-79b997595-khpgg"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.363939 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-default-certificate\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.364027 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65daa38d-8652-4438-af0e-5afc3524e5d4-secret-volume\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.364244 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.365848 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/764e2fc0-f6af-45a8-8a90-f78ce95abf62-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-lfnjg\" (UID: \"764e2fc0-f6af-45a8-8a90-f78ce95abf62\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.369343 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-stats-auth\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.370187 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-profile-collector-cert\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.372481 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww9kh\" (UniqueName: \"kubernetes.io/projected/c5ed7546-9652-448f-ac46-f4325cd00b24-kube-api-access-ww9kh\") pod \"machine-config-controller-84d6567774-5xplf\" (UID: \"c5ed7546-9652-448f-ac46-f4325cd00b24\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.376551 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-srv-cert\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.377256 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-webhook-cert\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.381193 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/31ef9900-22cb-4eb9-ab61-5b378f168126-certs\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.381446 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-apiservice-cert\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.381458 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c019c27b-0131-498d-b84a-6c79511d176e-metrics-certs\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.385357 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.387417 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g7nf\" (UniqueName: \"kubernetes.io/projected/e19e9e5b-3b2f-41ab-943c-e114f7613991-kube-api-access-7g7nf\") pod \"dns-operator-744455d44c-gq4l8\" (UID: \"e19e9e5b-3b2f-41ab-943c-e114f7613991\") " pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.406404 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkt27\" (UniqueName: \"kubernetes.io/projected/1724db9f-c072-42c6-a26b-5953c9656668-kube-api-access-mkt27\") pod \"olm-operator-6b444d44fb-npd4c\" (UID: \"1724db9f-c072-42c6-a26b-5953c9656668\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.414636 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.424922 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.427496 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5s629\" (UniqueName: \"kubernetes.io/projected/bbebe3c4-0f6b-4779-8a37-5c716c90c409-kube-api-access-5s629\") pod \"etcd-operator-b45778765-prx6x\" (UID: \"bbebe3c4-0f6b-4779-8a37-5c716c90c409\") " pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.431303 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.431683 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:57.931667766 +0000 UTC m=+143.047700893 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.437511 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.444668 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c"
Sep 30 20:15:57 crc kubenswrapper[4919]: W0930 20:15:57.445536 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3447472_d94d_4984_9b19_591fec8cc4b2.slice/crio-c7cd9924be54545ce4fb1af7bb3a00bebe44bb7d716d6281d23b41faac838749 WatchSource:0}: Error finding container c7cd9924be54545ce4fb1af7bb3a00bebe44bb7d716d6281d23b41faac838749: Status 404 returned error can't find the container with id c7cd9924be54545ce4fb1af7bb3a00bebe44bb7d716d6281d23b41faac838749
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.458512 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.459546 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b4fv\" (UniqueName: \"kubernetes.io/projected/f34b8c46-00c7-483c-b446-67990101e057-kube-api-access-4b4fv\") pod \"machine-config-operator-74547568cd-pxjnq\" (UID: \"f34b8c46-00c7-483c-b446-67990101e057\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.466647 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.484182 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27jn7\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-kube-api-access-27jn7\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.488779 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.490174 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhskm\" (UniqueName: \"kubernetes.io/projected/fc156064-2b1c-47c2-b91c-b7318dacb213-kube-api-access-rhskm\") pod \"openshift-controller-manager-operator-756b6f6bc6-xxkkd\" (UID: \"fc156064-2b1c-47c2-b91c-b7318dacb213\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.493075 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.497108 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.507643 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aed3f144-f330-47b6-b73b-5b079ba9f89d-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-h4fv8\" (UID: \"aed3f144-f330-47b6-b73b-5b079ba9f89d\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8"
Sep 30 20:15:57 crc kubenswrapper[4919]: W0930 20:15:57.512259 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd4a5780_a1bb_4918_b54e_afd17c1dd9e1.slice/crio-fbe8a17ec9ff86a8f42116ef110213467fb9bde39674d48a800fd173f6674299 WatchSource:0}: Error finding container fbe8a17ec9ff86a8f42116ef110213467fb9bde39674d48a800fd173f6674299: Status 404 returned error can't find the container with id fbe8a17ec9ff86a8f42116ef110213467fb9bde39674d48a800fd173f6674299
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.512307 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2d7ng"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.514346 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"]
Sep 30 20:15:57 crc kubenswrapper[4919]: W0930 20:15:57.514907 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6da7486f_8911_4897_bf58_165a98baf2f8.slice/crio-48c7693de4a95ebb487e128bb2d9c0ac82bb2418d84b5c0cc4e20a2462976d7b WatchSource:0}: Error finding container 48c7693de4a95ebb487e128bb2d9c0ac82bb2418d84b5c0cc4e20a2462976d7b: Status 404 returned error can't find the container with id 48c7693de4a95ebb487e128bb2d9c0ac82bb2418d84b5c0cc4e20a2462976d7b
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.541108 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" event={"ID":"e7478385-0c3c-4f02-ab80-fc2258357773","Type":"ContainerStarted","Data":"e4b4768ab2a20de850940d410d78518cc96fd59577ea647032763a2eb486eba4"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.541148 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" event={"ID":"e7478385-0c3c-4f02-ab80-fc2258357773","Type":"ContainerStarted","Data":"118f8f27cc5f16f047e121cf57faff4ffea9e0f724c95fec7602dbebad10dd21"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.543546 4919 generic.go:334] "Generic (PLEG): container finished" podID="89c9a653-2d79-4af8-9ee0-04dd3058a692" containerID="ca80a1632bd7a83ae1acb179b603c722e5edafc2bad4d1c0b71fee75971c431a" exitCode=0
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.543614 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" event={"ID":"89c9a653-2d79-4af8-9ee0-04dd3058a692","Type":"ContainerDied","Data":"ca80a1632bd7a83ae1acb179b603c722e5edafc2bad4d1c0b71fee75971c431a"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.543640 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" event={"ID":"89c9a653-2d79-4af8-9ee0-04dd3058a692","Type":"ContainerStarted","Data":"e28ffde3c9f0a99afc398117fa2b4762cbdf370b5969b4db1b55e5cf6e5ce069"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.545379 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.545719 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.045705694 +0000 UTC m=+143.161738821 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.546501 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" event={"ID":"42c8738a-aad8-4cc5-b18f-92eee2745673","Type":"ContainerStarted","Data":"2f5cbde3edbd851149a311baa1aa43377817ae541c558eb995685f3468fb69d0"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.550833 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xns2c" event={"ID":"98d93a4f-32ca-41db-9776-8bf3bad8727d","Type":"ContainerStarted","Data":"144e892c7e7938f499a5ca19e2cb8d17adb42354542f18fa3b03c2f57199f264"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.552839 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfnmz\" (UniqueName: \"kubernetes.io/projected/1da44c87-3dcd-4bf1-b898-df3cb2f860f8-kube-api-access-gfnmz\") pod \"packageserver-d55dfcdfc-qbmcz\" (UID: \"1da44c87-3dcd-4bf1-b898-df3cb2f860f8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz"
Sep 30 20:15:57 crc kubenswrapper[4919]: W0930 20:15:57.553429 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd858bbb5_c348_42d5_882f_03a21a91cbeb.slice/crio-7c8520f533ab681686748dd8886cfbc6d9276777e3da28cd115c8a2a9739db94 WatchSource:0}: Error finding container 7c8520f533ab681686748dd8886cfbc6d9276777e3da28cd115c8a2a9739db94: Status 404 returned error can't find the container with id 7c8520f533ab681686748dd8886cfbc6d9276777e3da28cd115c8a2a9739db94
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.554915 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" event={"ID":"d3447472-d94d-4984-9b19-591fec8cc4b2","Type":"ContainerStarted","Data":"c7cd9924be54545ce4fb1af7bb3a00bebe44bb7d716d6281d23b41faac838749"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.565721 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" event={"ID":"4599a59e-3533-494d-b149-f84b3033c62c","Type":"ContainerStarted","Data":"00258f713d6316dad18b7350af5745b060a79d9b81c63f8ac8df90ea645db473"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.566118 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.572240 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpk4s\" (UniqueName: \"kubernetes.io/projected/26051892-8dc6-4bf1-a7ba-0e9df1dea6e2-kube-api-access-mpk4s\") pod \"dns-default-hj49j\" (UID: \"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2\") " pod="openshift-dns/dns-default-hj49j"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.577564 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" event={"ID":"f25ddd0d-0dc9-442d-a893-6562032c3b95","Type":"ContainerStarted","Data":"72554d03689b4d991d268d229e13b658531f65bc26385a17df22b9b46e8326c1"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.589670 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqh65\" (UniqueName: \"kubernetes.io/projected/764e2fc0-f6af-45a8-8a90-f78ce95abf62-kube-api-access-qqh65\") pod \"control-plane-machine-set-operator-78cbb6b69f-lfnjg\" (UID: \"764e2fc0-f6af-45a8-8a90-f78ce95abf62\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.590258 4919 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-m9k8f container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body=
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.590315 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" podUID="4599a59e-3533-494d-b149-f84b3033c62c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.607635 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckckw\" (UniqueName: \"kubernetes.io/projected/8e136a30-1aed-41b9-a85e-c89fa3811e25-kube-api-access-ckckw\") pod \"service-ca-operator-777779d784-bx5zk\" (UID: \"8e136a30-1aed-41b9-a85e-c89fa3811e25\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.613574 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" event={"ID":"a68aedae-26b8-412b-b1af-383086a8e93f","Type":"ContainerStarted","Data":"639c1de80404e32a9a99514425eb4917a7dc7683880a5ae25b1acacddbd564f9"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.613639 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" event={"ID":"a68aedae-26b8-412b-b1af-383086a8e93f","Type":"ContainerStarted","Data":"73e8f837ad0ccef1e03f40af8fea58e9baff70078dec61d2b69a751483ad59e0"}
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.616911 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-hj49j"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.624709 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-qxlpx"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.630981 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2crdl\" (UniqueName: \"kubernetes.io/projected/65daa38d-8652-4438-af0e-5afc3524e5d4-kube-api-access-2crdl\") pod \"collect-profiles-29321055-2qkks\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.638435 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.646125 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.647897 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.648187 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.148056885 +0000 UTC m=+143.264090012 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.648379 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.649648 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.14963355 +0000 UTC m=+143.265666677 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.650516 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3abeda0a-0453-46a4-b73e-eccafb442e4d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-r9n6s\" (UID: \"3abeda0a-0453-46a4-b73e-eccafb442e4d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.652208 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.670150 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jblpc\" (UniqueName: \"kubernetes.io/projected/f75f4e78-ba83-436e-9f99-6156d6d065f4-kube-api-access-jblpc\") pod \"package-server-manager-789f6589d5-2b2p5\" (UID: \"f75f4e78-ba83-436e-9f99-6156d6d065f4\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.693709 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.700778 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf7sx\" (UniqueName: \"kubernetes.io/projected/0dda3300-8f42-4f22-b2f0-7a5235e607ef-kube-api-access-tf7sx\") pod \"multus-admission-controller-857f4d67dd-z2hvv\" (UID: \"0dda3300-8f42-4f22-b2f0-7a5235e607ef\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.712448 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.713284 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6cpv\" (UniqueName: \"kubernetes.io/projected/a25ce588-b65d-4541-ba64-7a4219330a33-kube-api-access-b6cpv\") pod \"csi-hostpathplugin-k46x5\" (UID: \"a25ce588-b65d-4541-ba64-7a4219330a33\") " pod="hostpath-provisioner/csi-hostpathplugin-k46x5"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.730945 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brx5c\" (UniqueName: \"kubernetes.io/projected/0a9f6d48-6413-4624-9598-615b4f16382f-kube-api-access-brx5c\") pod \"ingress-canary-68tmn\" (UID: \"0a9f6d48-6413-4624-9598-615b4f16382f\") " pod="openshift-ingress-canary/ingress-canary-68tmn"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.749561 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.749917 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.24989097 +0000 UTC m=+143.365924117 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:57 crc kubenswrapper[4919]: W0930 20:15:57.752277 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod61462d92_fbc3_462c_b847_7c5f5e8e457a.slice/crio-a3744f68bb267b7244569803ca8f8f47f30b5e95e3da2896edb0ffad042042fa WatchSource:0}: Error finding container a3744f68bb267b7244569803ca8f8f47f30b5e95e3da2896edb0ffad042042fa: Status 404 returned error can't find the container with id a3744f68bb267b7244569803ca8f8f47f30b5e95e3da2896edb0ffad042042fa
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.760118 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h27cw\" (UniqueName: \"kubernetes.io/projected/c019c27b-0131-498d-b84a-6c79511d176e-kube-api-access-h27cw\") pod \"router-default-5444994796-c55sj\" (UID: \"c019c27b-0131-498d-b84a-6c79511d176e\") " pod="openshift-ingress/router-default-5444994796-c55sj"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.766498 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.768934 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8ptm\" (UniqueName: \"kubernetes.io/projected/31ef9900-22cb-4eb9-ab61-5b378f168126-kube-api-access-j8ptm\") pod \"machine-config-server-p5sdz\" (UID: \"31ef9900-22cb-4eb9-ab61-5b378f168126\") " pod="openshift-machine-config-operator/machine-config-server-p5sdz"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.775756 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-c55sj"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.786381 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.801902 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdk2b\" (UniqueName: \"kubernetes.io/projected/fec5f277-cd5b-43e4-a996-890b4e736f42-kube-api-access-bdk2b\") pod \"service-ca-9c57cc56f-chftj\" (UID: \"fec5f277-cd5b-43e4-a996-890b4e736f42\") " pod="openshift-service-ca/service-ca-9c57cc56f-chftj"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.802200 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.811535 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qlvb\" (UniqueName: \"kubernetes.io/projected/27b68b20-5ad0-4c14-b3cd-31f070fb3ab6-kube-api-access-8qlvb\") pod \"catalog-operator-68c6474976-bb7h5\" (UID: \"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.813436 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.821966 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.822281 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-prx6x"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.829490 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.842064 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.850574 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.851236 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.851923 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.852459 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.352446516 +0000 UTC m=+143.468479643 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.875228 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-k46x5"
Sep 30 20:15:57 crc kubenswrapper[4919]: W0930 20:15:57.879928 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbebe3c4_0f6b_4779_8a37_5c716c90c409.slice/crio-401f12aa830ab5c1374b478309c2229af48ca3ed842447e0afc4b6634d558354 WatchSource:0}: Error finding container 401f12aa830ab5c1374b478309c2229af48ca3ed842447e0afc4b6634d558354: Status 404 returned error can't find the container with id 401f12aa830ab5c1374b478309c2229af48ca3ed842447e0afc4b6634d558354
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.882583 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2lzkb"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.891071 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-68tmn"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.915813 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-p5sdz"
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.955533 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.957399 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.457280358 +0000 UTC m=+143.573313485 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.957843 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:57 crc kubenswrapper[4919]: E0930 20:15:57.961512 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.461502791 +0000 UTC m=+143.577535908 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.964433 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-gq4l8"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.965818 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf"]
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.976716 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-fdmjq"]
Sep 30 20:15:57 crc kubenswrapper[4919]: W0930 20:15:57.977521 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1724db9f_c072_42c6_a26b_5953c9656668.slice/crio-4c68b250c2bf1709d0a1dbd652ad94cdddba0e2ae1efd8c2fba07ab9d065f9bc WatchSource:0}: Error finding container 4c68b250c2bf1709d0a1dbd652ad94cdddba0e2ae1efd8c2fba07ab9d065f9bc: Status 404 returned error can't find the container with id 4c68b250c2bf1709d0a1dbd652ad94cdddba0e2ae1efd8c2fba07ab9d065f9bc
Sep 30 20:15:57 crc kubenswrapper[4919]: I0930 20:15:57.994283 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-khpgg"]
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.053048 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8"]
Sep 30 20:15:58 crc kubenswrapper[4919]: W0930 20:15:58.053507 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcdba9d13_b3b5_4a75_adf1_9b14ac993af1.slice/crio-7c4f0467223185089839d5a9804855763ef27454127e7e6e44de83c84916990e WatchSource:0}: Error finding container 7c4f0467223185089839d5a9804855763ef27454127e7e6e44de83c84916990e: Status 404 returned error can't find the container with id 7c4f0467223185089839d5a9804855763ef27454127e7e6e44de83c84916990e
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.063859 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.063985 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.563963764 +0000 UTC m=+143.679996891 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.064286 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.064547 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.564539491 +0000 UTC m=+143.680572618 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: W0930 20:15:58.086589 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5ed7546_9652_448f_ac46_f4325cd00b24.slice/crio-0e8d3c968d3858d7d6fce6d5d22fdcb0e4e772f1c98c75b039ee1dc65a7dbada WatchSource:0}: Error finding container 0e8d3c968d3858d7d6fce6d5d22fdcb0e4e772f1c98c75b039ee1dc65a7dbada: Status 404 returned error can't find the container with id 0e8d3c968d3858d7d6fce6d5d22fdcb0e4e772f1c98c75b039ee1dc65a7dbada
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.094142 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-chftj"
Sep 30 20:15:58 crc kubenswrapper[4919]: W0930 20:15:58.112626 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode19e9e5b_3b2f_41ab_943c_e114f7613991.slice/crio-35efa86d60c6ec314073a5b87b3a8c8ebb144092eb3c82e2d37c844507550019 WatchSource:0}: Error finding container 35efa86d60c6ec314073a5b87b3a8c8ebb144092eb3c82e2d37c844507550019: Status 404 returned error can't find the container with id 35efa86d60c6ec314073a5b87b3a8c8ebb144092eb3c82e2d37c844507550019
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.167973 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.168620 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.66858609 +0000 UTC m=+143.784619207 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.172997 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d"]
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.176524 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd"]
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.269513 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.270331 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.770315563 +0000 UTC m=+143.886348690 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.322896 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-hj49j"]
Sep 30 20:15:58 crc kubenswrapper[4919]: W0930 20:15:58.336425 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31ef9900_22cb_4eb9_ab61_5b378f168126.slice/crio-f4069c6d4359753b5385999f4a236d098c9befdbb7fd01a9501ff4e4e6a6505d WatchSource:0}: Error finding container f4069c6d4359753b5385999f4a236d098c9befdbb7fd01a9501ff4e4e6a6505d: Status 404 returned error can't find the container with id f4069c6d4359753b5385999f4a236d098c9befdbb7fd01a9501ff4e4e6a6505d
Sep 30 20:15:58 crc kubenswrapper[4919]: W0930 20:15:58.352024 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc019c27b_0131_498d_b84a_6c79511d176e.slice/crio-85776c7b7b8924ee854b68e600e348b1152370e349eaaade11a61375659f70e9 WatchSource:0}: Error finding container 85776c7b7b8924ee854b68e600e348b1152370e349eaaade11a61375659f70e9: Status 404 returned error can't find the container with id 85776c7b7b8924ee854b68e600e348b1152370e349eaaade11a61375659f70e9
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.360041 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq"]
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.371421 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.371656 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.871625073 +0000 UTC m=+143.987658210 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.371906 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.372592 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.872582811 +0000 UTC m=+143.988615938 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.435606 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5"]
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.474102 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.474961 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:58.974933561 +0000 UTC m=+144.090966698 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.587032 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.587948 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.08793502 +0000 UTC m=+144.203968147 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.623972 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg"]
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.658552 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz"]
Sep 30 20:15:58 crc kubenswrapper[4919]: W0930 20:15:58.687604 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod764e2fc0_f6af_45a8_8a90_f78ce95abf62.slice/crio-ed5777404afc9301c99ac09cb4bd58bb23d2f0c7e9f2f4ba84a8b8e78213c19c WatchSource:0}: Error finding container ed5777404afc9301c99ac09cb4bd58bb23d2f0c7e9f2f4ba84a8b8e78213c19c: Status 404 returned error can't find the container with id ed5777404afc9301c99ac09cb4bd58bb23d2f0c7e9f2f4ba84a8b8e78213c19c
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.697660 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.698130 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.198111066 +0000 UTC m=+144.314144193 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.724643 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d" event={"ID":"1c528181-4537-450e-b8b8-23b70b25a9c8","Type":"ContainerStarted","Data":"3a025782ff400d39ac87f5af92470b34422aad5bcbb862e8caeb1b50ee8f7950"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.740483 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" event={"ID":"23070aa6-f355-494e-b108-a3fba285cd2c","Type":"ContainerStarted","Data":"f96f605ae859fffe77f0ccc6e6787634a65f26b077ffa253d985401f76527700"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.760071 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" event={"ID":"c5ed7546-9652-448f-ac46-f4325cd00b24","Type":"ContainerStarted","Data":"0e8d3c968d3858d7d6fce6d5d22fdcb0e4e772f1c98c75b039ee1dc65a7dbada"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.766639 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" event={"ID":"e7478385-0c3c-4f02-ab80-fc2258357773","Type":"ContainerStarted","Data":"7a892c664b92ae841a34930b2fa3b31a07b28ce4ee8dc2116a8a9d8227f4613c"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.772717 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hj49j" event={"ID":"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2","Type":"ContainerStarted","Data":"727e196b9a4d3d9b4a0b7824399dcf56d99e967e9896ba7a8337917de0eb3dbb"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.793033 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2d7ng" event={"ID":"23df7f75-7b68-4810-92a4-b0e7e39f9bf4","Type":"ContainerStarted","Data":"82efa40b4058c679df62a5e073fa0ddcbf58cbc3d0ce8d6c85da4628f921e2c4"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.793075 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2d7ng" event={"ID":"23df7f75-7b68-4810-92a4-b0e7e39f9bf4","Type":"ContainerStarted","Data":"65311a8172256f42bfcd2000955bca52786ab163afe4b14d0415d59f0de52d96"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.795876 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2d7ng"
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.799804 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.800067 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.300056705 +0000 UTC m=+144.416089832 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.802643 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-c55sj" event={"ID":"c019c27b-0131-498d-b84a-6c79511d176e","Type":"ContainerStarted","Data":"85776c7b7b8924ee854b68e600e348b1152370e349eaaade11a61375659f70e9"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.803652 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" event={"ID":"f34b8c46-00c7-483c-b446-67990101e057","Type":"ContainerStarted","Data":"f186309d400bd21ec2d06a34c8f404cdae6cb3258d6f7369734484b1a2e14698"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.807342 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" event={"ID":"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1","Type":"ContainerStarted","Data":"bd034c3b02f6cbe4df89e749ab53acbae9445e236c087c8a5929493d3d6a0b9f"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.807363 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" event={"ID":"cd4a5780-a1bb-4918-b54e-afd17c1dd9e1","Type":"ContainerStarted","Data":"fbe8a17ec9ff86a8f42116ef110213467fb9bde39674d48a800fd173f6674299"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.822435 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" event={"ID":"f25ddd0d-0dc9-442d-a893-6562032c3b95","Type":"ContainerStarted","Data":"e1b0c14066ede69861ebada73bcb19ff87a144a28c9c0044de0044abb0ba25f4"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.843565 4919 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d7ng container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.843630 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d7ng" podUID="23df7f75-7b68-4810-92a4-b0e7e39f9bf4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.876259 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" podStartSLOduration=121.876200957 podStartE2EDuration="2m1.876200957s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:58.826084158 +0000 UTC m=+143.942117305" watchObservedRunningTime="2025-09-30 20:15:58.876200957 +0000 UTC m=+143.992234084"
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.877445 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" event={"ID":"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3","Type":"ContainerStarted","Data":"4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.877493 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" event={"ID":"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3","Type":"ContainerStarted","Data":"c8c456c16629efaaaf1c8ebbf7b2f9bad6d8d99044f8811c787d5b2123e17008"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.878496 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.905165 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" event={"ID":"4599a59e-3533-494d-b149-f84b3033c62c","Type":"ContainerStarted","Data":"afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.908272 4919 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-m9k8f container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body=
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.908303 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" podUID="4599a59e-3533-494d-b149-f84b3033c62c" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused"
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.908698 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.909507 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.409442509 +0000 UTC m=+144.525475636 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.909612 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:15:58 crc kubenswrapper[4919]: E0930 20:15:58.910897 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.41087411 +0000 UTC m=+144.526907237 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.911465 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-k46x5"]
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.923099 4919 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-8brdq container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.923150 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" podUID="dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.931359 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" event={"ID":"2fbcced3-31b3-462e-af0f-9d80537d7d55","Type":"ContainerStarted","Data":"276d5d920eaab979bca28c5c6547084b44519656bc0acad993ed72146346fb01"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.931418 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" event={"ID":"2fbcced3-31b3-462e-af0f-9d80537d7d55","Type":"ContainerStarted","Data":"6b7f43e90c2dc27c498eaa02154659e348db60e69ebf67f21aacbc21b03a284e"}
Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.968955 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api"
pods=["openshift-ingress-canary/ingress-canary-68tmn"] Sep 30 20:15:58 crc kubenswrapper[4919]: I0930 20:15:58.991355 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" event={"ID":"89c9a653-2d79-4af8-9ee0-04dd3058a692","Type":"ContainerStarted","Data":"987b19b7ca3a42393b0dfc1ec404c955eee8aa961a3e81c48d04b9f09b45d080"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.003032 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"] Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.005942 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk"] Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.010970 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.011791 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.511749368 +0000 UTC m=+144.627782495 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.022887 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-z2hvv"] Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.036971 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" event={"ID":"bbebe3c4-0f6b-4779-8a37-5c716c90c409","Type":"ContainerStarted","Data":"401f12aa830ab5c1374b478309c2229af48ca3ed842447e0afc4b6634d558354"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.040915 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-p5sdz" event={"ID":"31ef9900-22cb-4eb9-ab61-5b378f168126","Type":"ContainerStarted","Data":"f4069c6d4359753b5385999f4a236d098c9befdbb7fd01a9501ff4e4e6a6505d"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.042769 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" event={"ID":"aed3f144-f330-47b6-b73b-5b079ba9f89d","Type":"ContainerStarted","Data":"f1ea8f8cbebfcf44ee664bfc1d4b2a33fbd960c11e0ba33912c2cbd7b39779b4"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.045561 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" 
event={"ID":"fc156064-2b1c-47c2-b91c-b7318dacb213","Type":"ContainerStarted","Data":"2619cfc5df2950ba8d6ea01c5ab73312be5a0aaf9a6bcac0549db441bbb793cd"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.054447 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" event={"ID":"42c8738a-aad8-4cc5-b18f-92eee2745673","Type":"ContainerStarted","Data":"1857921e867b9189272ed41ddf38fe806199c5609c64beb36763c520478164d1"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.054774 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.078032 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" event={"ID":"d3447472-d94d-4984-9b19-591fec8cc4b2","Type":"ContainerStarted","Data":"f4cb08727123b50df3f4003d7dee781fe5d9816e6528f4cd018db30dc231e7da"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.104852 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2lzkb" event={"ID":"cdba9d13-b3b5-4a75-adf1-9b14ac993af1","Type":"ContainerStarted","Data":"15d8a159bbaef818a77863d293b29ecfadc7eebf9921a961ce3b4cebd1d914db"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.104900 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2lzkb" event={"ID":"cdba9d13-b3b5-4a75-adf1-9b14ac993af1","Type":"ContainerStarted","Data":"7c4f0467223185089839d5a9804855763ef27454127e7e6e44de83c84916990e"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.105793 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-2lzkb" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.111675 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" event={"ID":"e19e9e5b-3b2f-41ab-943c-e114f7613991","Type":"ContainerStarted","Data":"35efa86d60c6ec314073a5b87b3a8c8ebb144092eb3c82e2d37c844507550019"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.123350 4919 patch_prober.go:28] interesting pod/console-operator-58897d9998-2lzkb container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.123403 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2lzkb" podUID="cdba9d13-b3b5-4a75-adf1-9b14ac993af1" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": dial tcp 10.217.0.15:8443: connect: connection refused" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.125282 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.125748 4919 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.625732285 +0000 UTC m=+144.741765412 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.158540 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-qxlpx" event={"ID":"2d053914-edeb-49d0-bffa-b6d63885a5fb","Type":"ContainerStarted","Data":"673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.158608 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-qxlpx" event={"ID":"2d053914-edeb-49d0-bffa-b6d63885a5fb","Type":"ContainerStarted","Data":"f96619556c058ee8d3dad9046a7c95e1738ff4d9d46c9a0fed8f869285065430"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.171881 4919 generic.go:334] "Generic (PLEG): container finished" podID="d858bbb5-c348-42d5-882f-03a21a91cbeb" containerID="e8233590f746eff7b17ef39801dbba36b97a1ff64151f78fe31f5d8ba2f97512" exitCode=0 Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.172050 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" event={"ID":"d858bbb5-c348-42d5-882f-03a21a91cbeb","Type":"ContainerDied","Data":"e8233590f746eff7b17ef39801dbba36b97a1ff64151f78fe31f5d8ba2f97512"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.172120 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" event={"ID":"d858bbb5-c348-42d5-882f-03a21a91cbeb","Type":"ContainerStarted","Data":"7c8520f533ab681686748dd8886cfbc6d9276777e3da28cd115c8a2a9739db94"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.176977 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" event={"ID":"1724db9f-c072-42c6-a26b-5953c9656668","Type":"ContainerStarted","Data":"4c68b250c2bf1709d0a1dbd652ad94cdddba0e2ae1efd8c2fba07ab9d065f9bc"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.177882 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.179748 4919 generic.go:334] "Generic (PLEG): container finished" podID="98d93a4f-32ca-41db-9776-8bf3bad8727d" containerID="2f632e72af31bbc4ae90c5a3d4622397665c7d2bfaf8cdad33e1b4fdbbbd00de" exitCode=0 Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.180025 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xns2c" event={"ID":"98d93a4f-32ca-41db-9776-8bf3bad8727d","Type":"ContainerDied","Data":"2f632e72af31bbc4ae90c5a3d4622397665c7d2bfaf8cdad33e1b4fdbbbd00de"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.185047 4919 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-npd4c 
container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.185104 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" podUID="1724db9f-c072-42c6-a26b-5953c9656668" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.202732 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" event={"ID":"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6","Type":"ContainerStarted","Data":"05f46b2d09c27fb08ac9d5f245b21b45b61076255675f9389d16cec3cc3b2f66"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.211820 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" event={"ID":"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56","Type":"ContainerStarted","Data":"8717ee71c9ee9b2b585b5f65f55ee8a8411cd5232e979fcf9c37fb7370bd0698"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.215902 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s"] Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.226671 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.228676 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.728640231 +0000 UTC m=+144.844673358 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.245010 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5"] Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.245424 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" event={"ID":"61462d92-fbc3-462c-b847-7c5f5e8e457a","Type":"ContainerStarted","Data":"a3744f68bb267b7244569803ca8f8f47f30b5e95e3da2896edb0ffad042042fa"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.248533 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-chftj"] Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.261407 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" event={"ID":"6da7486f-8911-4897-bf58-165a98baf2f8","Type":"ContainerStarted","Data":"f40984cedd5e1cb257beb3faa585d817548cb38bc0c089d0f40c73546b678bd2"} Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.261467 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" event={"ID":"6da7486f-8911-4897-bf58-165a98baf2f8","Type":"ContainerStarted","Data":"48c7693de4a95ebb487e128bb2d9c0ac82bb2418d84b5c0cc4e20a2462976d7b"} Sep 30 20:15:59 crc kubenswrapper[4919]: W0930 20:15:59.273203 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf75f4e78_ba83_436e_9f99_6156d6d065f4.slice/crio-337fc384a26241b717b3d6ddff79b291b07bd7534ecf6e73440ae895e0a0e355 WatchSource:0}: Error finding container 337fc384a26241b717b3d6ddff79b291b07bd7534ecf6e73440ae895e0a0e355: Status 404 returned error can't find the container with id 337fc384a26241b717b3d6ddff79b291b07bd7534ecf6e73440ae895e0a0e355 Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.329017 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.330198 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.830179528 +0000 UTC m=+144.946212655 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: W0930 20:15:59.359807 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfec5f277_cd5b_43e4_a996_890b4e736f42.slice/crio-874dfae017d454b1dc8f6611418e13aeb6702550eb1e5e5daad7469fd6128258 WatchSource:0}: Error finding container 874dfae017d454b1dc8f6611418e13aeb6702550eb1e5e5daad7469fd6128258: Status 404 returned error can't find the container with id 874dfae017d454b1dc8f6611418e13aeb6702550eb1e5e5daad7469fd6128258 Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.432637 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.433001 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:15:59.932983452 +0000 UTC m=+145.049016579 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.534783 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.535163 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.035147597 +0000 UTC m=+145.151180724 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.626684 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-jjkdp" podStartSLOduration=122.626669814 podStartE2EDuration="2m2.626669814s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.625124029 +0000 UTC m=+144.741157156" watchObservedRunningTime="2025-09-30 20:15:59.626669814 +0000 UTC m=+144.742702931" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.627695 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-kngwc" podStartSLOduration=123.627690033 podStartE2EDuration="2m3.627690033s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.598067506 +0000 UTC m=+144.714100623" watchObservedRunningTime="2025-09-30 20:15:59.627690033 +0000 UTC m=+144.743723160" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.640329 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.642241 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.142226934 +0000 UTC m=+145.258260051 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.674803 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-4phzk" podStartSLOduration=123.674783315 podStartE2EDuration="2m3.674783315s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.674302461 +0000 UTC m=+144.790335598" watchObservedRunningTime="2025-09-30 20:15:59.674783315 +0000 UTC m=+144.790816472" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.742181 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.742697 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.242680979 +0000 UTC m=+145.358714106 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.767716 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-2d7ng" podStartSLOduration=122.767700343 podStartE2EDuration="2m2.767700343s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.767077365 +0000 UTC m=+144.883110492" watchObservedRunningTime="2025-09-30 20:15:59.767700343 +0000 UTC m=+144.883733470" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.818907 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" podStartSLOduration=122.818889334 podStartE2EDuration="2m2.818889334s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.816729571 +0000 UTC m=+144.932762698" watchObservedRunningTime="2025-09-30 20:15:59.818889334 +0000 UTC m=+144.934922461" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.832436 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-mp48h" podStartSLOduration=122.832421745 podStartE2EDuration="2m2.832421745s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.831591851 +0000 UTC m=+144.947624978" watchObservedRunningTime="2025-09-30 20:15:59.832421745 +0000 UTC m=+144.948454872" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.845640 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.846023 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.346009368 +0000 UTC m=+145.462042495 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.883286 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-qxlpx" podStartSLOduration=123.883269125 podStartE2EDuration="2m3.883269125s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.880612288 +0000 UTC m=+144.996645415" watchObservedRunningTime="2025-09-30 20:15:59.883269125 +0000 UTC m=+144.999302242" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.912018 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" podStartSLOduration=122.912000796 podStartE2EDuration="2m2.912000796s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.910644427 +0000 UTC m=+145.026677554" watchObservedRunningTime="2025-09-30 20:15:59.912000796 +0000 UTC m=+145.028033923" Sep 30 20:15:59 crc kubenswrapper[4919]: I0930 20:15:59.949030 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:15:59 crc kubenswrapper[4919]: E0930 20:15:59.949817 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.44980453 +0000 UTC m=+145.565837657 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.025051 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-j9pft" podStartSLOduration=123.025033976 podStartE2EDuration="2m3.025033976s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:15:59.96571292 +0000 UTC m=+145.081746047" watchObservedRunningTime="2025-09-30 20:16:00.025033976 +0000 UTC m=+145.141067103" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.055144 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:00 crc kubenswrapper[4919]: E0930 20:16:00.055498 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.555484686 +0000 UTC m=+145.671517803 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.062906 4919 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-s5kls container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.6:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.062963 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" podUID="42c8738a-aad8-4cc5-b18f-92eee2745673" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.6:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.067824 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" podStartSLOduration=124.067807243 podStartE2EDuration="2m4.067807243s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.063520249 +0000 UTC m=+145.179553376" watchObservedRunningTime="2025-09-30 20:16:00.067807243 +0000 UTC m=+145.183840370" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.134251 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" podStartSLOduration=123.134230674 podStartE2EDuration="2m3.134230674s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.105427681 +0000 UTC m=+145.221460798" watchObservedRunningTime="2025-09-30 20:16:00.134230674 +0000 UTC m=+145.250263801" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.135030 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6mx92" podStartSLOduration=124.135026537 podStartE2EDuration="2m4.135026537s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.13373297 +0000 UTC m=+145.249766097" watchObservedRunningTime="2025-09-30 20:16:00.135026537 +0000 UTC m=+145.251059664" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.181771 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:00 crc 
kubenswrapper[4919]: E0930 20:16:00.182088 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.682076098 +0000 UTC m=+145.798109225 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.282948 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:00 crc kubenswrapper[4919]: E0930 20:16:00.283709 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.783694407 +0000 UTC m=+145.899727534 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.373456 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" event={"ID":"f75f4e78-ba83-436e-9f99-6156d6d065f4","Type":"ContainerStarted","Data":"337fc384a26241b717b3d6ddff79b291b07bd7534ecf6e73440ae895e0a0e355"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.388938 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:00 crc kubenswrapper[4919]: E0930 20:16:00.389315 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.889304441 +0000 UTC m=+146.005337568 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.392318 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" event={"ID":"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56","Type":"ContainerStarted","Data":"761e7912c0cba0230c931f1b94a76da5d7a6eff031f6af3f80f084782b6a15e7"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.392358 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" event={"ID":"f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56","Type":"ContainerStarted","Data":"550c0651203e580af46a32fef0a268b5b07b9d1f7ad3bbef0d5f8c6e1cc5e06b"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.409609 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-p5sdz" event={"ID":"31ef9900-22cb-4eb9-ab61-5b378f168126","Type":"ContainerStarted","Data":"67f6505d0c6fd342bbd80e42ce8e81c9d0f557f7756f9ec377dccf81557ec96d"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.415592 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" event={"ID":"aed3f144-f330-47b6-b73b-5b079ba9f89d","Type":"ContainerStarted","Data":"f591a37cee6245cfb7ea4be57e9bbbc325a4dcab86879e69704bc5fd549efd7a"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.435708 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" event={"ID":"fc156064-2b1c-47c2-b91c-b7318dacb213","Type":"ContainerStarted","Data":"f12c2ebdd9a3bdf99828206a8651145405ee01c74d105c4ee676e1552a45c8e6"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.448172 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-2lzkb" podStartSLOduration=124.448158703 podStartE2EDuration="2m4.448158703s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.203471767 +0000 UTC m=+145.319504894" watchObservedRunningTime="2025-09-30 20:16:00.448158703 +0000 UTC m=+145.564191830" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.448416 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-fdmjq" podStartSLOduration=123.44841155 podStartE2EDuration="2m3.44841155s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.434696224 +0000 UTC m=+145.550729351" watchObservedRunningTime="2025-09-30 20:16:00.44841155 +0000 UTC m=+145.564444677" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.462801 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress/router-default-5444994796-c55sj" event={"ID":"c019c27b-0131-498d-b84a-6c79511d176e","Type":"ContainerStarted","Data":"98dabec024a8a0e63e2516395552b098a3b19ed7835b6e8b4423019692b6abb4"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.493043 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" event={"ID":"3abeda0a-0453-46a4-b73e-eccafb442e4d","Type":"ContainerStarted","Data":"098d141e85e2b5f5acedcfd161a52fd6fec2dc60a1e65ead600323da7e9479fb"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.493835 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:00 crc kubenswrapper[4919]: E0930 20:16:00.494316 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:00.994276537 +0000 UTC m=+146.110309664 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.496035 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d" event={"ID":"1c528181-4537-450e-b8b8-23b70b25a9c8","Type":"ContainerStarted","Data":"2bed9f23e5d7f92b579a45e84e7301d2ab60089b5dcb728e87761d13b1f10a4c"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.501071 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg" event={"ID":"764e2fc0-f6af-45a8-8a90-f78ce95abf62","Type":"ContainerStarted","Data":"67d33d95efadff087ee0612872cdcfe5499cf2c8746739fd2f33553cf059c920"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.501137 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg" event={"ID":"764e2fc0-f6af-45a8-8a90-f78ce95abf62","Type":"ContainerStarted","Data":"ed5777404afc9301c99ac09cb4bd58bb23d2f0c7e9f2f4ba84a8b8e78213c19c"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.514343 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-h4fv8" podStartSLOduration=123.514324876 podStartE2EDuration="2m3.514324876s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.478907972 +0000 UTC m=+145.594941099" watchObservedRunningTime="2025-09-30 20:16:00.514324876 +0000 UTC m=+145.630357993" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.530856 4919 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" event={"ID":"23070aa6-f355-494e-b108-a3fba285cd2c","Type":"ContainerStarted","Data":"0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.532069 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.550325 4919 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-khpgg container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.550381 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" podUID="23070aa6-f355-494e-b108-a3fba285cd2c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.565059 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" event={"ID":"c5ed7546-9652-448f-ac46-f4325cd00b24","Type":"ContainerStarted","Data":"a35f1b09f2190363f4dfccc375489bde19158e5e25391811121ffe0dc54ea6c7"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.596743 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" event={"ID":"65daa38d-8652-4438-af0e-5afc3524e5d4","Type":"ContainerStarted","Data":"4dc1998eee24c1ae8153d11e0cdcafb95d28c54bd2ba714c757ef2e7d4ba32d3"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.598188 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.599444 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-lfnjg" podStartSLOduration=123.599425558 podStartE2EDuration="2m3.599425558s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.597386939 +0000 UTC m=+145.713420066" watchObservedRunningTime="2025-09-30 20:16:00.599425558 +0000 UTC m=+145.715458685" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.599933 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-p5sdz" podStartSLOduration=6.599926702 podStartE2EDuration="6.599926702s" podCreationTimestamp="2025-09-30 20:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.527312452 +0000 UTC m=+145.643345579" watchObservedRunningTime="2025-09-30 20:16:00.599926702 +0000 UTC m=+145.715959829" Sep 30 
20:16:00 crc kubenswrapper[4919]: E0930 20:16:00.602487 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.102472966 +0000 UTC m=+146.218506183 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.649036 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-chftj" event={"ID":"fec5f277-cd5b-43e4-a996-890b4e736f42","Type":"ContainerStarted","Data":"874dfae017d454b1dc8f6611418e13aeb6702550eb1e5e5daad7469fd6128258"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.675030 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-68tmn" event={"ID":"0a9f6d48-6413-4624-9598-615b4f16382f","Type":"ContainerStarted","Data":"4f77b4d76dea8de6780e0507789d5cbba5865ab34e1bc5f77d2e5440f691aa1b"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.675084 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-68tmn" event={"ID":"0a9f6d48-6413-4624-9598-615b4f16382f","Type":"ContainerStarted","Data":"640e10cfaa128896d7e30c1d549aa8d3d2c22b62949e0bf05dfd6e381dead3eb"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.700249 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:00 crc kubenswrapper[4919]: E0930 20:16:00.701002 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.200981175 +0000 UTC m=+146.317014352 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.734296 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k46x5" event={"ID":"a25ce588-b65d-4541-ba64-7a4219330a33","Type":"ContainerStarted","Data":"bfeb59125e2a9f4df0bc0bf9789a6d2ce8e4e37cf8d1fa3c42c9ca6fcbcc5039"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.735540 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-c55sj" podStartSLOduration=123.735526494 podStartE2EDuration="2m3.735526494s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.735087531 +0000 UTC m=+145.851120658" watchObservedRunningTime="2025-09-30 20:16:00.735526494 +0000 UTC m=+145.851559621" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.747546 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" event={"ID":"1da44c87-3dcd-4bf1-b898-df3cb2f860f8","Type":"ContainerStarted","Data":"f0b6aeb9c0ae36a071ef0bb116889a465317edd5bced6bf5db7bcc3b1de66c50"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.747589 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" event={"ID":"1da44c87-3dcd-4bf1-b898-df3cb2f860f8","Type":"ContainerStarted","Data":"a83b395bcba71592260d9bb9e7bd99d8c3ada28e10a5706646806f1c521ddcab"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.748014 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.753145 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" event={"ID":"bbebe3c4-0f6b-4779-8a37-5c716c90c409","Type":"ContainerStarted","Data":"3d214b8b087a375db2f5265fa38a65c05ada0c6060775b4711309c052496df0c"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.753932 4919 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-qbmcz container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" start-of-body= Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.753965 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" podUID="1da44c87-3dcd-4bf1-b898-df3cb2f860f8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.36:5443/healthz\": dial tcp 10.217.0.36:5443: connect: connection refused" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.774353 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" 
event={"ID":"61462d92-fbc3-462c-b847-7c5f5e8e457a","Type":"ContainerStarted","Data":"c5f805295a6dfcaa68f12d3c010b6cfbb5b63840200fea7b01e2585d68cc2359"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.774396 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" event={"ID":"61462d92-fbc3-462c-b847-7c5f5e8e457a","Type":"ContainerStarted","Data":"02565af06ed418808d16c90a14b865b0e8dba3569d185656ac8a979d24104bda"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.778291 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.782912 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.782949 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.796977 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" event={"ID":"f34b8c46-00c7-483c-b446-67990101e057","Type":"ContainerStarted","Data":"fa950ef98aaa68cbc52db76372918a649c55a153a22961f631e04b9146f8bfab"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.800854 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" event={"ID":"0dda3300-8f42-4f22-b2f0-7a5235e607ef","Type":"ContainerStarted","Data":"4e120a3777b749fa3a3af3e36d42461f535b12bdcb9840db2a49688bba4d8277"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.801519 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:00 crc kubenswrapper[4919]: E0930 20:16:00.803737 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.303721267 +0000 UTC m=+146.419754384 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.816052 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hj49j" event={"ID":"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2","Type":"ContainerStarted","Data":"8c3072d50c6896dc25c1637cdf0070cf3ec2e31aa681c169013fe8931afc2d87"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.830156 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xxkkd" podStartSLOduration=123.83010911 podStartE2EDuration="2m3.83010911s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.798700761 +0000 UTC m=+145.914733888" watchObservedRunningTime="2025-09-30 20:16:00.83010911 +0000 UTC m=+145.946142237" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.861072 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" event={"ID":"2fbcced3-31b3-462e-af0f-9d80537d7d55","Type":"ContainerStarted","Data":"a892d6d9ccc6f8076624a088baa02ea892652181545d1873052a4ec3ea4c5777"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.875065 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" podStartSLOduration=123.87505012 podStartE2EDuration="2m3.87505012s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.832803078 +0000 UTC m=+145.948836205" watchObservedRunningTime="2025-09-30 20:16:00.87505012 +0000 UTC m=+145.991083247" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.879608 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" event={"ID":"8e136a30-1aed-41b9-a85e-c89fa3811e25","Type":"ContainerStarted","Data":"6aa1690ae2fa9f9379f0f5cedb7889663f26d073df74d774139b64b9871e5c3e"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.884763 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-68tmn" podStartSLOduration=6.88474719 podStartE2EDuration="6.88474719s" podCreationTimestamp="2025-09-30 20:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.877551912 +0000 UTC m=+145.993585039" watchObservedRunningTime="2025-09-30 20:16:00.88474719 +0000 UTC m=+146.000780317" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.904338 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:00 crc kubenswrapper[4919]: E0930 20:16:00.908831 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.408807956 +0000 UTC m=+146.524841083 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.910675 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" podStartSLOduration=123.910654269 podStartE2EDuration="2m3.910654269s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.90927033 +0000 UTC m=+146.025303457" watchObservedRunningTime="2025-09-30 20:16:00.910654269 +0000 UTC m=+146.026687396" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.915846 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" event={"ID":"d858bbb5-c348-42d5-882f-03a21a91cbeb","Type":"ContainerStarted","Data":"424688353ba047801421d68b0812f51158fb4d5e6ba1f5c90fff2dc13215e480"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.916757 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.972491 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" event={"ID":"1724db9f-c072-42c6-a26b-5953c9656668","Type":"ContainerStarted","Data":"50bd96171dd59746a2f77cb26adecc5e526b76fa583a025b5ba54db7fbd9c4c0"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.989864 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-npd4c" Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.995310 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" event={"ID":"27b68b20-5ad0-4c14-b3cd-31f070fb3ab6","Type":"ContainerStarted","Data":"a70d01a27f7b943038b0ad9a99e8e1d526ac27d0fa2d816da40937aaf91837dd"} Sep 30 20:16:00 crc kubenswrapper[4919]: I0930 20:16:00.995875 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:00.997144 4919 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-bb7h5 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" 
start-of-body= Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:00.997174 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" podUID="27b68b20-5ad0-4c14-b3cd-31f070fb3ab6" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.41:8443/healthz\": dial tcp 10.217.0.41:8443: connect: connection refused" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:00.997701 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" podStartSLOduration=60.997682527 podStartE2EDuration="1m0.997682527s" podCreationTimestamp="2025-09-30 20:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.956585218 +0000 UTC m=+146.072618345" watchObservedRunningTime="2025-09-30 20:16:00.997682527 +0000 UTC m=+146.113715654" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:00.999188 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-chftj" podStartSLOduration=123.99918202 podStartE2EDuration="2m3.99918202s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:00.994019121 +0000 UTC m=+146.110052238" watchObservedRunningTime="2025-09-30 20:16:00.99918202 +0000 UTC m=+146.115215147" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.006677 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.007397 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.507379317 +0000 UTC m=+146.623412444 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.073033 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" event={"ID":"e19e9e5b-3b2f-41ab-943c-e114f7613991","Type":"ContainerStarted","Data":"5190f05f484ccdb16e92d696e2f170b206a9a5d8ae66df24bcea6377eb719953"} Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.088070 4919 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d7ng container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.088118 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d7ng" podUID="23df7f75-7b68-4810-92a4-b0e7e39f9bf4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.089020 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-nmz9x" podStartSLOduration=124.089003898 podStartE2EDuration="2m4.089003898s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.037794377 +0000 UTC m=+146.153827504" watchObservedRunningTime="2025-09-30 20:16:01.089003898 +0000 UTC m=+146.205037025" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.098847 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.099163 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.099207 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.108058 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.108821 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.608805711 +0000 UTC m=+146.724838828 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.119618 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" podStartSLOduration=124.119600903 podStartE2EDuration="2m4.119600903s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.119123219 +0000 UTC m=+146.235156346" watchObservedRunningTime="2025-09-30 20:16:01.119600903 +0000 UTC m=+146.235634030" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.120076 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" podStartSLOduration=124.120071717 podStartE2EDuration="2m4.120071717s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.088813853 +0000 UTC m=+146.204846980" watchObservedRunningTime="2025-09-30 20:16:01.120071717 +0000 UTC m=+146.236104844" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.154266 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-prx6x" podStartSLOduration=124.154243435 podStartE2EDuration="2m4.154243435s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.154126312 +0000 UTC m=+146.270159439" watchObservedRunningTime="2025-09-30 20:16:01.154243435 +0000 UTC m=+146.270276562" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.235571 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.254262 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.754244728 +0000 UTC m=+146.870277855 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.293502 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" podStartSLOduration=124.293483013 podStartE2EDuration="2m4.293483013s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.290465865 +0000 UTC m=+146.406498992" watchObservedRunningTime="2025-09-30 20:16:01.293483013 +0000 UTC m=+146.409516140" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.326294 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" podStartSLOduration=124.326268681 podStartE2EDuration="2m4.326268681s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.325865859 +0000 UTC m=+146.441898986" watchObservedRunningTime="2025-09-30 20:16:01.326268681 +0000 UTC m=+146.442301828" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.337874 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.338164 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.838147184 +0000 UTC m=+146.954180311 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.358606 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" podStartSLOduration=125.358587766 podStartE2EDuration="2m5.358587766s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.358160723 +0000 UTC m=+146.474193850" watchObservedRunningTime="2025-09-30 20:16:01.358587766 +0000 UTC m=+146.474620893" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.439412 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.439921 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:01.939904358 +0000 UTC m=+147.055937485 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.494020 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" podStartSLOduration=124.494005283 podStartE2EDuration="2m4.494005283s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.44032539 +0000 UTC m=+146.556358527" watchObservedRunningTime="2025-09-30 20:16:01.494005283 +0000 UTC m=+146.610038410" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.494343 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wq9f6" podStartSLOduration=125.494340412 podStartE2EDuration="2m5.494340412s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:01.491612193 +0000 UTC m=+146.607645320" watchObservedRunningTime="2025-09-30 20:16:01.494340412 +0000 UTC m=+146.610373539" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.540942 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.541307 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.04129092 +0000 UTC m=+147.157324047 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.605923 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.607172 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.639362 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.644563 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.645028 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.1450118 +0000 UTC m=+147.261044927 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.749054 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.749662 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.249639466 +0000 UTC m=+147.365672593 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.788410 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 20:16:01 crc kubenswrapper[4919]: [-]has-synced failed: reason withheld Sep 30 20:16:01 crc kubenswrapper[4919]: [+]process-running ok Sep 30 20:16:01 crc kubenswrapper[4919]: healthz check failed Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.788465 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.851678 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.852061 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.352047568 +0000 UTC m=+147.468080695 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.952662 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.952866 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.452836714 +0000 UTC m=+147.568869831 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:01 crc kubenswrapper[4919]: I0930 20:16:01.953490 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:01 crc kubenswrapper[4919]: E0930 20:16:01.953991 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.453969856 +0000 UTC m=+147.570002983 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.055330 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.055535 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.555509243 +0000 UTC m=+147.671542370 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.055803 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.056088 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.55607538 +0000 UTC m=+147.672108497 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.083831 4919 patch_prober.go:28] interesting pod/console-operator-58897d9998-2lzkb container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.083889 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2lzkb" podUID="cdba9d13-b3b5-4a75-adf1-9b14ac993af1" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.085016 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5xplf" event={"ID":"c5ed7546-9652-448f-ac46-f4325cd00b24","Type":"ContainerStarted","Data":"bbc68279b816b49fc40b1c9af40cb233cc549e60d9e9845b0f998c8b8b965bb3"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.088123 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-chftj" event={"ID":"fec5f277-cd5b-43e4-a996-890b4e736f42","Type":"ContainerStarted","Data":"b00fd36571e8d7f43ffc80a3ac1fef418d876f5d8dfd8566a0d37682222428e5"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.099270 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" 
event={"ID":"f75f4e78-ba83-436e-9f99-6156d6d065f4","Type":"ContainerStarted","Data":"46e1996b2f5a39e700843b0cf8c17094eda6c2e98e6a755cc89a1eabf41c455d"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.099313 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" event={"ID":"f75f4e78-ba83-436e-9f99-6156d6d065f4","Type":"ContainerStarted","Data":"2c1876f461b22c0f270c8ca2773569b7c97479129b91b9f3f220c31bd13d0069"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.099354 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.112512 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-pxjnq" event={"ID":"f34b8c46-00c7-483c-b446-67990101e057","Type":"ContainerStarted","Data":"3eef6804cab90a176ed333cdbc8021f9d55f3236eeb0b32d2b82ba50f6f4b0bb"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.116573 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" event={"ID":"0dda3300-8f42-4f22-b2f0-7a5235e607ef","Type":"ContainerStarted","Data":"b08132bbc0a5f5262b1c3b1900a9d28ff7b2d380c091f6626d34498392952c84"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.116610 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" event={"ID":"0dda3300-8f42-4f22-b2f0-7a5235e607ef","Type":"ContainerStarted","Data":"7e3a68d3b0fda5a99c2baad80d69e72ad4f521ae5ca8ddd4d63df65589f6265e"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.118667 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xns2c" event={"ID":"98d93a4f-32ca-41db-9776-8bf3bad8727d","Type":"ContainerStarted","Data":"00be672e2a513ac3867f3361bdfc3a2c43059c0d37dc241f15dc5f2ffe072fc7"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.118690 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xns2c" event={"ID":"98d93a4f-32ca-41db-9776-8bf3bad8727d","Type":"ContainerStarted","Data":"5913661fb8196826e7b2372e77cc03ade2551e9bcfa5a76f6d0c870b26662ef8"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.120069 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k46x5" event={"ID":"a25ce588-b65d-4541-ba64-7a4219330a33","Type":"ContainerStarted","Data":"8e097396b942796cfe1ca732d96b355ea636db68848ba3758694b43207b350ab"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.125754 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-gq4l8" event={"ID":"e19e9e5b-3b2f-41ab-943c-e114f7613991","Type":"ContainerStarted","Data":"67e5c8067399f9557fb41a0b4e981033a2f879dcdab4ac7c69f42c3281eba7af"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.133666 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bx5zk" event={"ID":"8e136a30-1aed-41b9-a85e-c89fa3811e25","Type":"ContainerStarted","Data":"8dcdd5ab65390660ae7ad80e97c70d45b4618d10d2d35db61a7d6b5dbb8693c6"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.136252 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" event={"ID":"65daa38d-8652-4438-af0e-5afc3524e5d4","Type":"ContainerStarted","Data":"408b54bc5e3dcd322bbbee70f478dbfdc1358b8065f2a123198fa1e5cf5ca212"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.138058 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" event={"ID":"3abeda0a-0453-46a4-b73e-eccafb442e4d","Type":"ContainerStarted","Data":"039affab313561a10390fef08dca122105eb0a99d78a3cc05058f948a8a0482b"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.146316 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" podStartSLOduration=125.146305969 podStartE2EDuration="2m5.146305969s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:02.14285274 +0000 UTC m=+147.258885867" watchObservedRunningTime="2025-09-30 20:16:02.146305969 +0000 UTC m=+147.262339096" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.151108 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d" event={"ID":"1c528181-4537-450e-b8b8-23b70b25a9c8","Type":"ContainerStarted","Data":"98a6052dd927fedc1c877cda5976a7fce4763b748d4f442be11b83ed4b81708a"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.156980 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.157372 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.657354629 +0000 UTC m=+147.773387756 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.159057 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hj49j" event={"ID":"26051892-8dc6-4bf1-a7ba-0e9df1dea6e2","Type":"ContainerStarted","Data":"6f3bb8fa6ddbe3df4febe5cbeeeca1fbbc304217f05b34c913710e3851222c9f"} Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.159094 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-hj49j" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.159430 4919 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-khpgg container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.159517 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" podUID="23070aa6-f355-494e-b108-a3fba285cd2c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.159976 4919 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d7ng container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.160034 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d7ng" podUID="23df7f75-7b68-4810-92a4-b0e7e39f9bf4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.176525 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pmpcw" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.201576 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-xns2c" podStartSLOduration=126.201561368 podStartE2EDuration="2m6.201561368s" podCreationTimestamp="2025-09-30 20:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:02.195007108 +0000 UTC m=+147.311040245" watchObservedRunningTime="2025-09-30 20:16:02.201561368 +0000 UTC m=+147.317594495" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.202641 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-bb7h5" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.257947 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.263958 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.763932802 +0000 UTC m=+147.879966149 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.283363 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-z2hvv" podStartSLOduration=125.283340053 podStartE2EDuration="2m5.283340053s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:02.231550445 +0000 UTC m=+147.347583572" watchObservedRunningTime="2025-09-30 20:16:02.283340053 +0000 UTC m=+147.399373180" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.298092 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-r9n6s" podStartSLOduration=125.298080579 podStartE2EDuration="2m5.298080579s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:02.281704426 +0000 UTC m=+147.397737543" watchObservedRunningTime="2025-09-30 20:16:02.298080579 +0000 UTC m=+147.414113706" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.360967 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.361433 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.861415281 +0000 UTC m=+147.977448408 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.428241 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-hj49j" podStartSLOduration=8.428207603 podStartE2EDuration="8.428207603s" podCreationTimestamp="2025-09-30 20:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:02.400030748 +0000 UTC m=+147.516063875" watchObservedRunningTime="2025-09-30 20:16:02.428207603 +0000 UTC m=+147.544240730" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.465646 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.469519 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:02.969499247 +0000 UTC m=+148.085532374 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.505188 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4h84d" podStartSLOduration=125.505157029 podStartE2EDuration="2m5.505157029s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:02.431635072 +0000 UTC m=+147.547668199" watchObservedRunningTime="2025-09-30 20:16:02.505157029 +0000 UTC m=+147.621190156" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.573752 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.574127 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.074112393 +0000 UTC m=+148.190145520 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.678128 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.678441 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.17843001 +0000 UTC m=+148.294463137 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.682388 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-qbmcz" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.779588 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.779635 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.279613067 +0000 UTC m=+148.395646194 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.780083 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.780415 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.28040368 +0000 UTC m=+148.396436807 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.789615 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 20:16:02 crc kubenswrapper[4919]: [-]has-synced failed: reason withheld Sep 30 20:16:02 crc kubenswrapper[4919]: [+]process-running ok Sep 30 20:16:02 crc kubenswrapper[4919]: healthz check failed Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.789688 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.880735 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.880916 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.380892656 +0000 UTC m=+148.496925783 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.881034 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.881362 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.38135162 +0000 UTC m=+148.497384747 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.982091 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.982324 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.482290089 +0000 UTC m=+148.598323216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.982414 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:02 crc kubenswrapper[4919]: E0930 20:16:02.982794 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.482783664 +0000 UTC m=+148.598816791 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:02 crc kubenswrapper[4919]: I0930 20:16:02.983273 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bvpwb" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.083588 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.083875 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.583834026 +0000 UTC m=+148.699867343 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.084073 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.084413 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.584397053 +0000 UTC m=+148.700430180 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.179053 4919 generic.go:334] "Generic (PLEG): container finished" podID="65daa38d-8652-4438-af0e-5afc3524e5d4" containerID="408b54bc5e3dcd322bbbee70f478dbfdc1358b8065f2a123198fa1e5cf5ca212" exitCode=0 Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.179697 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" event={"ID":"65daa38d-8652-4438-af0e-5afc3524e5d4","Type":"ContainerDied","Data":"408b54bc5e3dcd322bbbee70f478dbfdc1358b8065f2a123198fa1e5cf5ca212"} Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.184358 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k46x5" event={"ID":"a25ce588-b65d-4541-ba64-7a4219330a33","Type":"ContainerStarted","Data":"8912f5689b277af57e23934431feac6db243dc40d3cac1f45a1287a9973c90ef"} Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.185185 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.185491 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.186000 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.685969761 +0000 UTC m=+148.802002888 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.186030 4919 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-khpgg container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.186066 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" podUID="23070aa6-f355-494e-b108-a3fba285cd2c" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.195957 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.287393 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.287640 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.287736 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.287832 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.288548 4919 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.788529037 +0000 UTC m=+148.904562164 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.289601 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.296817 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.305830 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.388840 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.389922 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.889908609 +0000 UTC m=+149.005941726 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.395808 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7f7p6"] Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.396730 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.402289 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.413044 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7f7p6"] Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.458986 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.475835 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.495049 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-utilities\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.495358 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.495458 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d726b\" (UniqueName: \"kubernetes.io/projected/283bf417-5302-4743-8a44-76fa61eba04b-kube-api-access-d726b\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.495499 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.495567 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-catalog-content\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.495854 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:03.995842833 +0000 UTC m=+149.111875960 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.558405 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-54vv5"] Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.559585 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.564591 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.570737 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-54vv5"] Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.597723 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.597884 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-catalog-content\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.597943 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-utilities\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.597967 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d726b\" (UniqueName: \"kubernetes.io/projected/283bf417-5302-4743-8a44-76fa61eba04b-kube-api-access-d726b\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.598137 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.098122922 +0000 UTC m=+149.214156049 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.598493 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-catalog-content\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.598715 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-utilities\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.633079 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d726b\" (UniqueName: \"kubernetes.io/projected/283bf417-5302-4743-8a44-76fa61eba04b-kube-api-access-d726b\") pod \"certified-operators-7f7p6\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") " pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.699000 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jlzp\" (UniqueName: \"kubernetes.io/projected/f4ca8312-ff90-418e-8503-8acc3f9d63b1-kube-api-access-4jlzp\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.699067 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-utilities\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.699094 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.699124 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-catalog-content\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.699461 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.199448882 +0000 UTC m=+149.315482009 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.710299 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.755782 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7grc2"] Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.756737 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.773921 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7grc2"] Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.780253 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 20:16:03 crc kubenswrapper[4919]: [-]has-synced failed: reason withheld Sep 30 20:16:03 crc kubenswrapper[4919]: [+]process-running ok Sep 30 20:16:03 crc kubenswrapper[4919]: healthz check failed Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.780289 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.801032 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.801287 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jlzp\" (UniqueName: \"kubernetes.io/projected/f4ca8312-ff90-418e-8503-8acc3f9d63b1-kube-api-access-4jlzp\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.801349 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-utilities\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.801394 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-catalog-content\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.801940 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.301911366 +0000 UTC m=+149.417944533 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.802079 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-catalog-content\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.802234 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-utilities\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.802978 4919 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.818186 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jlzp\" (UniqueName: \"kubernetes.io/projected/f4ca8312-ff90-418e-8503-8acc3f9d63b1-kube-api-access-4jlzp\") pod \"community-operators-54vv5\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") " pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.880735 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.902532 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:03 crc kubenswrapper[4919]: E0930 20:16:03.903434 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.403406242 +0000 UTC m=+149.519439369 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.903406 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-utilities\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.903519 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-catalog-content\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.903870 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dw4f\" (UniqueName: \"kubernetes.io/projected/14a021e6-225d-498c-aa4c-008e2ad9580d-kube-api-access-5dw4f\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.959386 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ddzjp"] Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.960599 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:03 crc kubenswrapper[4919]: W0930 20:16:03.961067 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-af5f6ed60a68b23e24e968a158e9bbc55414d77c42e13ae215e17260c9daa381 WatchSource:0}: Error finding container af5f6ed60a68b23e24e968a158e9bbc55414d77c42e13ae215e17260c9daa381: Status 404 returned error can't find the container with id af5f6ed60a68b23e24e968a158e9bbc55414d77c42e13ae215e17260c9daa381 Sep 30 20:16:03 crc kubenswrapper[4919]: I0930 20:16:03.966621 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ddzjp"] Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.019959 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7f7p6"] Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.020915 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.021281 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-utilities\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.021391 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-catalog-content\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.021532 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.521505417 +0000 UTC m=+149.637538544 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.021580 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vwg2\" (UniqueName: \"kubernetes.io/projected/dcb560ec-da35-4b84-86f5-e56a181c4194-kube-api-access-7vwg2\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.021680 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-catalog-content\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.021796 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-utilities\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.021862 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dw4f\" (UniqueName: \"kubernetes.io/projected/14a021e6-225d-498c-aa4c-008e2ad9580d-kube-api-access-5dw4f\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.021952 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.022277 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.522269059 +0000 UTC m=+149.638302186 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.023129 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-utilities\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.033604 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-catalog-content\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:04 crc kubenswrapper[4919]: W0930 20:16:04.043106 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod283bf417_5302_4743_8a44_76fa61eba04b.slice/crio-c41f811134543cf3b24f6bfc238bc8d2c37dfe3fd54b513f178dedb3971134eb WatchSource:0}: Error finding container c41f811134543cf3b24f6bfc238bc8d2c37dfe3fd54b513f178dedb3971134eb: Status 404 returned error can't find the container with id c41f811134543cf3b24f6bfc238bc8d2c37dfe3fd54b513f178dedb3971134eb Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.091874 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dw4f\" (UniqueName: \"kubernetes.io/projected/14a021e6-225d-498c-aa4c-008e2ad9580d-kube-api-access-5dw4f\") pod \"certified-operators-7grc2\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.123780 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.124066 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.624032942 +0000 UTC m=+149.740066069 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.124324 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-catalog-content\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.124385 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-utilities\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.124436 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.124465 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vwg2\" (UniqueName: \"kubernetes.io/projected/dcb560ec-da35-4b84-86f5-e56a181c4194-kube-api-access-7vwg2\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.125132 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-catalog-content\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.125528 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.625514195 +0000 UTC m=+149.741547312 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.125546 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-utilities\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.144094 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vwg2\" (UniqueName: \"kubernetes.io/projected/dcb560ec-da35-4b84-86f5-e56a181c4194-kube-api-access-7vwg2\") pod \"community-operators-ddzjp\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.176119 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-54vv5"] Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.211391 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"dac25fc9fc3a89aac518d1c3d04ccd5f19d15da3cee365c9077c4eeefb94c399"} Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.211433 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"af5f6ed60a68b23e24e968a158e9bbc55414d77c42e13ae215e17260c9daa381"} Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.213905 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"67a93e4acb352ed4f7e92a55d7d441dacfd0c38daf0f7e9b9fc2940219947ee6"} Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.221234 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f7p6" event={"ID":"283bf417-5302-4743-8a44-76fa61eba04b","Type":"ContainerStarted","Data":"c41f811134543cf3b24f6bfc238bc8d2c37dfe3fd54b513f178dedb3971134eb"} Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.226693 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.226988 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.726962379 +0000 UTC m=+149.842995506 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.227445 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k46x5" event={"ID":"a25ce588-b65d-4541-ba64-7a4219330a33","Type":"ContainerStarted","Data":"89b98dd9a1daeb9e4b11d4129deab67e1015c2c27fb5fcf06d090204de7ac678"}
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.227487 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-k46x5" event={"ID":"a25ce588-b65d-4541-ba64-7a4219330a33","Type":"ContainerStarted","Data":"e31e6cf5bcd50381577f7b444533307fe21c6f8fce456c7fe6badd49a4bb3d93"}
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.227690 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.228457 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.728448612 +0000 UTC m=+149.844481739 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.247757 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2244787b835397a4e7db2ca12eb7c58fb99a01f57b36feeb5635eb548a922b5f"}
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.247792 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"aa8bf66fd6bd044ef3398f1d08c7d0436ea364d87a68c39875d6cff291cfd4ee"}
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.276002 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-k46x5" podStartSLOduration=10.275988927 podStartE2EDuration="10.275988927s" podCreationTimestamp="2025-09-30 20:15:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:04.273839615 +0000 UTC m=+149.389872742" watchObservedRunningTime="2025-09-30 20:16:04.275988927 +0000 UTC m=+149.392022054"
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.297484 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ddzjp"
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.329665 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.330482 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.830453833 +0000 UTC m=+149.946486960 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.390193 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7grc2"
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.431139 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.433189 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:04.933171894 +0000 UTC m=+150.049205021 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.534495 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.534822 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:05.034799303 +0000 UTC m=+150.150832420 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.535026 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.535345 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:05.035331778 +0000 UTC m=+150.151364905 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
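The repeating nestedpendingoperations entries above show how the kubelet's volume manager handles these failures: each failed MountVolume/UnmountVolume is parked and retried after a delay (durationBeforeRetry 500ms here) until the missing CSI driver finally registers. As a rough, hypothetical sketch of that retry shape in Go (not the kubelet's actual implementation; the mount stub and backoff numbers are invented for illustration):

package main

import (
	"errors"
	"fmt"
	"time"
)

// retryWithBackoff keeps retrying op, doubling the delay between attempts
// up to maxDelay. This mirrors the spirit of the kubelet's
// "No retries permitted until ..." messages, not its actual code.
func retryWithBackoff(op func() error, initial, maxDelay time.Duration, attempts int) error {
	delay := initial
	for i := 0; i < attempts; i++ {
		if err := op(); err == nil {
			return nil
		} else {
			fmt.Printf("attempt %d failed: %v; retrying in %s\n", i+1, err, delay)
		}
		time.Sleep(delay)
		if delay *= 2; delay > maxDelay {
			delay = maxDelay
		}
	}
	return errors.New("operation did not succeed within retry budget")
}

func main() {
	tries := 0
	// Hypothetical stand-in for MountVolume: fails until the CSI driver
	// "registers" on the third attempt.
	mount := func() error {
		if tries++; tries < 3 {
			return errors.New("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
		}
		return nil
	}
	if err := retryWithBackoff(mount, 500*time.Millisecond, 4*time.Second, 10); err != nil {
		fmt.Println(err)
	}
}

In the log, the operations keep failing with the same driver-not-found error until the plugin registration at 20:16:04.791392 below, after which both the teardown and the mount go through.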
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.545412 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.579906 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ddzjp"]
Sep 30 20:16:04 crc kubenswrapper[4919]: W0930 20:16:04.586057 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddcb560ec_da35_4b84_86f5_e56a181c4194.slice/crio-4e10505f3d05c46af6e182463ef346e8bcb28bf19e66324fced0a9fbd0278211 WatchSource:0}: Error finding container 4e10505f3d05c46af6e182463ef346e8bcb28bf19e66324fced0a9fbd0278211: Status 404 returned error can't find the container with id 4e10505f3d05c46af6e182463ef346e8bcb28bf19e66324fced0a9fbd0278211
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.637133 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2crdl\" (UniqueName: \"kubernetes.io/projected/65daa38d-8652-4438-af0e-5afc3524e5d4-kube-api-access-2crdl\") pod \"65daa38d-8652-4438-af0e-5afc3524e5d4\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") "
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.637429 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.637516 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65daa38d-8652-4438-af0e-5afc3524e5d4-secret-volume\") pod \"65daa38d-8652-4438-af0e-5afc3524e5d4\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") "
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.637608 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65daa38d-8652-4438-af0e-5afc3524e5d4-config-volume\") pod \"65daa38d-8652-4438-af0e-5afc3524e5d4\" (UID: \"65daa38d-8652-4438-af0e-5afc3524e5d4\") "
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.637717 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:05.137694349 +0000 UTC m=+150.253727476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.637892 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.638251 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:05.138239075 +0000 UTC m=+150.254272202 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.639299 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65daa38d-8652-4438-af0e-5afc3524e5d4-config-volume" (OuterVolumeSpecName: "config-volume") pod "65daa38d-8652-4438-af0e-5afc3524e5d4" (UID: "65daa38d-8652-4438-af0e-5afc3524e5d4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.643492 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65daa38d-8652-4438-af0e-5afc3524e5d4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "65daa38d-8652-4438-af0e-5afc3524e5d4" (UID: "65daa38d-8652-4438-af0e-5afc3524e5d4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.643889 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65daa38d-8652-4438-af0e-5afc3524e5d4-kube-api-access-2crdl" (OuterVolumeSpecName: "kube-api-access-2crdl") pod "65daa38d-8652-4438-af0e-5afc3524e5d4" (UID: "65daa38d-8652-4438-af0e-5afc3524e5d4"). InnerVolumeSpecName "kube-api-access-2crdl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.668714 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7grc2"]
Sep 30 20:16:04 crc kubenswrapper[4919]: W0930 20:16:04.675610 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod14a021e6_225d_498c_aa4c_008e2ad9580d.slice/crio-4baf8f5319c587771da80b103636a906e40a82e036e4ef62c50a78dd7a053f2b WatchSource:0}: Error finding container 4baf8f5319c587771da80b103636a906e40a82e036e4ef62c50a78dd7a053f2b: Status 404 returned error can't find the container with id 4baf8f5319c587771da80b103636a906e40a82e036e4ef62c50a78dd7a053f2b
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.740472 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.740697 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-30 20:16:05.240671498 +0000 UTC m=+150.356704625 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.741510 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.741755 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2crdl\" (UniqueName: \"kubernetes.io/projected/65daa38d-8652-4438-af0e-5afc3524e5d4-kube-api-access-2crdl\") on node \"crc\" DevicePath \"\""
Sep 30 20:16:04 crc kubenswrapper[4919]: E0930 20:16:04.741895 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-30 20:16:05.241877322 +0000 UTC m=+150.357910449 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zpbl8" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.742077 4919 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/65daa38d-8652-4438-af0e-5afc3524e5d4-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.742204 4919 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/65daa38d-8652-4438-af0e-5afc3524e5d4-config-volume\") on node \"crc\" DevicePath \"\""
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.779425 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 20:16:04 crc kubenswrapper[4919]: [-]has-synced failed: reason withheld
Sep 30 20:16:04 crc kubenswrapper[4919]: [+]process-running ok
Sep 30 20:16:04 crc kubenswrapper[4919]: healthz check failed
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.779505 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.791392 4919 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-30T20:16:03.802991327Z","Handler":null,"Name":""}
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.797258 4919 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.797290 4919 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.843709 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.861015 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.945143 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.984090 4919 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 20:16:04 crc kubenswrapper[4919]: I0930 20:16:04.984137 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.012722 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zpbl8\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.170820 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.226678 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 20:16:05 crc kubenswrapper[4919]: E0930 20:16:05.227068 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65daa38d-8652-4438-af0e-5afc3524e5d4" containerName="collect-profiles"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.227079 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="65daa38d-8652-4438-af0e-5afc3524e5d4" containerName="collect-profiles"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.227178 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="65daa38d-8652-4438-af0e-5afc3524e5d4" containerName="collect-profiles"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.227541 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.229373 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.229724 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.236555 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.249558 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48165011-4466-4728-96bc-d6e986584019-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"48165011-4466-4728-96bc-d6e986584019\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.249840 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48165011-4466-4728-96bc-d6e986584019-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"48165011-4466-4728-96bc-d6e986584019\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.278078 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks" event={"ID":"65daa38d-8652-4438-af0e-5afc3524e5d4","Type":"ContainerDied","Data":"4dc1998eee24c1ae8153d11e0cdcafb95d28c54bd2ba714c757ef2e7d4ba32d3"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.278116 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4dc1998eee24c1ae8153d11e0cdcafb95d28c54bd2ba714c757ef2e7d4ba32d3"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.278182 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.289393 4919 generic.go:334] "Generic (PLEG): container finished" podID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerID="0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404" exitCode=0
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.289453 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ddzjp" event={"ID":"dcb560ec-da35-4b84-86f5-e56a181c4194","Type":"ContainerDied","Data":"0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.289477 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ddzjp" event={"ID":"dcb560ec-da35-4b84-86f5-e56a181c4194","Type":"ContainerStarted","Data":"4e10505f3d05c46af6e182463ef346e8bcb28bf19e66324fced0a9fbd0278211"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.293591 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.297335 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5e5746ce7a90b2235137ce12a029206be9fc58dc26c2f42a73019f5ddae760c2"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.297507 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.300154 4919 generic.go:334] "Generic (PLEG): container finished" podID="283bf417-5302-4743-8a44-76fa61eba04b" containerID="6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf" exitCode=0
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.300206 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f7p6" event={"ID":"283bf417-5302-4743-8a44-76fa61eba04b","Type":"ContainerDied","Data":"6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.303777 4919 generic.go:334] "Generic (PLEG): container finished" podID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerID="c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9" exitCode=0
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.303866 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54vv5" event={"ID":"f4ca8312-ff90-418e-8503-8acc3f9d63b1","Type":"ContainerDied","Data":"c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.303890 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54vv5" event={"ID":"f4ca8312-ff90-418e-8503-8acc3f9d63b1","Type":"ContainerStarted","Data":"a98613a90b7ee027ea7dd461259eee3495b6c340c831a001850c89ad55564fda"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.308385 4919 generic.go:334] "Generic (PLEG): container finished" podID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerID="f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22" exitCode=0
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.309169 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7grc2" event={"ID":"14a021e6-225d-498c-aa4c-008e2ad9580d","Type":"ContainerDied","Data":"f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.309199 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7grc2" event={"ID":"14a021e6-225d-498c-aa4c-008e2ad9580d","Type":"ContainerStarted","Data":"4baf8f5319c587771da80b103636a906e40a82e036e4ef62c50a78dd7a053f2b"}
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.369601 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48165011-4466-4728-96bc-d6e986584019-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"48165011-4466-4728-96bc-d6e986584019\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.369756 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48165011-4466-4728-96bc-d6e986584019-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"48165011-4466-4728-96bc-d6e986584019\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.371503 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48165011-4466-4728-96bc-d6e986584019-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"48165011-4466-4728-96bc-d6e986584019\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.390983 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48165011-4466-4728-96bc-d6e986584019-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"48165011-4466-4728-96bc-d6e986584019\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.397562 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-98qcl"]
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.399883 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.402109 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.421457 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-98qcl"]
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.471955 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-catalog-content\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.472128 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n2x9\" (UniqueName: \"kubernetes.io/projected/82021dd4-fbb8-4832-a38d-cd00aa9d786b-kube-api-access-6n2x9\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.472474 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-utilities\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.546826 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.573376 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-catalog-content\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.573427 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n2x9\" (UniqueName: \"kubernetes.io/projected/82021dd4-fbb8-4832-a38d-cd00aa9d786b-kube-api-access-6n2x9\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.573459 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-utilities\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.573907 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-utilities\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.574134 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-catalog-content\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.596494 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n2x9\" (UniqueName: \"kubernetes.io/projected/82021dd4-fbb8-4832-a38d-cd00aa9d786b-kube-api-access-6n2x9\") pod \"redhat-marketplace-98qcl\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") " pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.648130 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.686718 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpbl8"]
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.742139 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.764245 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jm7dz"]
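The catalog-content and utilities volumes being verified and mounted for these marketplace catalog pods are plain emptyDir volumes, which is why SetUp succeeds immediately with no attach or device-staging phase. A minimal sketch of that pod shape using client-go's typed API; the container name, image tag, and mount paths below are assumptions for illustration, not the pods' actual spec:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"sigs.k8s.io/yaml"
)

func main() {
	// Illustrative pod shape only; volume names match the log entries above.
	pod := corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{Name: "redhat-marketplace-98qcl", Namespace: "openshift-marketplace"},
		Spec: corev1.PodSpec{
			Volumes: []corev1.Volume{
				{Name: "utilities", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
				{Name: "catalog-content", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
			},
			Containers: []corev1.Container{{
				Name:  "registry-server",                                      // hypothetical name
				Image: "registry.redhat.io/redhat/redhat-marketplace-index:v4", // hypothetical tag
				VolumeMounts: []corev1.VolumeMount{
					{Name: "utilities", MountPath: "/utilities"},               // hypothetical path
					{Name: "catalog-content", MountPath: "/extracted-catalog"}, // hypothetical path
				},
			}},
		},
	}
	out, _ := yaml.Marshal(pod)
	fmt.Println(string(out))
}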
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.765539 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.772545 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jm7dz"]
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.780432 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 20:16:05 crc kubenswrapper[4919]: [-]has-synced failed: reason withheld
Sep 30 20:16:05 crc kubenswrapper[4919]: [+]process-running ok
Sep 30 20:16:05 crc kubenswrapper[4919]: healthz check failed
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.780490 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.876898 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-utilities\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.877262 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-catalog-content\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.877343 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnbqx\" (UniqueName: \"kubernetes.io/projected/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-kube-api-access-cnbqx\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.972811 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.978907 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-catalog-content\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.978948 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-utilities\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.979007 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnbqx\" (UniqueName: \"kubernetes.io/projected/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-kube-api-access-cnbqx\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.979682 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-catalog-content\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:05 crc kubenswrapper[4919]: I0930 20:16:05.979935 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-utilities\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.000773 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-98qcl"]
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.006956 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnbqx\" (UniqueName: \"kubernetes.io/projected/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-kube-api-access-cnbqx\") pod \"redhat-marketplace-jm7dz\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.084059 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jm7dz"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.359054 4919 generic.go:334] "Generic (PLEG): container finished" podID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerID="d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c" exitCode=0
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.359879 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98qcl" event={"ID":"82021dd4-fbb8-4832-a38d-cd00aa9d786b","Type":"ContainerDied","Data":"d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c"}
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.359911 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98qcl" event={"ID":"82021dd4-fbb8-4832-a38d-cd00aa9d786b","Type":"ContainerStarted","Data":"26bfd5c31d9d68d0bc4d381d6d1f0732f4d78a4cc8e9f51621dd96787e61dc95"}
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.367261 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"48165011-4466-4728-96bc-d6e986584019","Type":"ContainerStarted","Data":"eeba91fde82ced0667631d6ae450921e6b36b32c6c46d72aa9895c934ce8bf1e"}
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.384538 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" event={"ID":"6be387e2-3aff-43e1-91bc-bc8257764da1","Type":"ContainerStarted","Data":"6f651912a7ea66d9bee35d136c8df7325d2f88f220256e6c07771545897963bf"}
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.384573 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" event={"ID":"6be387e2-3aff-43e1-91bc-bc8257764da1","Type":"ContainerStarted","Data":"eba7282bb6685f3d0fe2caf7b68c3cf7d1399cd18719fb9706b632e9c7928509"}
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.384585 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.431548 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" podStartSLOduration=129.431530644 podStartE2EDuration="2m9.431530644s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:06.416156789 +0000 UTC m=+151.532189936" watchObservedRunningTime="2025-09-30 20:16:06.431530644 +0000 UTC m=+151.547563771"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.433069 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jm7dz"]
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.559718 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p6xv9"]
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.567090 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p6xv9"]
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.567260 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.570425 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.589916 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-utilities\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.590018 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-catalog-content\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.590105 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfv7k\" (UniqueName: \"kubernetes.io/projected/d8339c07-c7d4-4da0-8927-b99887894379-kube-api-access-qfv7k\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.691097 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfv7k\" (UniqueName: \"kubernetes.io/projected/d8339c07-c7d4-4da0-8927-b99887894379-kube-api-access-qfv7k\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.691149 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-utilities\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.691193 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-catalog-content\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.691899 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-utilities\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.692502 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-catalog-content\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.714229 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfv7k\" (UniqueName: \"kubernetes.io/projected/d8339c07-c7d4-4da0-8927-b99887894379-kube-api-access-qfv7k\") pod \"redhat-operators-p6xv9\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") " pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.766873 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.767094 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.773907 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.779075 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 20:16:06 crc kubenswrapper[4919]: [-]has-synced failed: reason withheld
Sep 30 20:16:06 crc kubenswrapper[4919]: [+]process-running ok
Sep 30 20:16:06 crc kubenswrapper[4919]: healthz check failed
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.779131 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.901885 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.932802 4919 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d7ng container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.932854 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2d7ng" podUID="23df7f75-7b68-4810-92a4-b0e7e39f9bf4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.932913 4919 patch_prober.go:28] interesting pod/downloads-7954f5f757-2d7ng container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.932960 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2d7ng" podUID="23df7f75-7b68-4810-92a4-b0e7e39f9bf4" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.946228 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-95mmt"]
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.947509 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.955073 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-95mmt"]
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.996503 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-utilities\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.996547 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-catalog-content\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:06 crc kubenswrapper[4919]: I0930 20:16:06.996567 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xxqj\" (UniqueName: \"kubernetes.io/projected/9394b462-e717-46a0-b247-57181adb5d6a-kube-api-access-2xxqj\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.097407 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-utilities\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.097439 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-catalog-content\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.097463 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xxqj\" (UniqueName: \"kubernetes.io/projected/9394b462-e717-46a0-b247-57181adb5d6a-kube-api-access-2xxqj\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.098419 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-utilities\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.098431 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-catalog-content\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.123524 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xxqj\" (UniqueName: \"kubernetes.io/projected/9394b462-e717-46a0-b247-57181adb5d6a-kube-api-access-2xxqj\") pod \"redhat-operators-95mmt\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.293535 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-qxlpx"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.293893 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-qxlpx"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.295274 4919 patch_prober.go:28] interesting pod/console-f9d7485db-qxlpx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.27:8443/health\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body=
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.295324 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-qxlpx" podUID="2d053914-edeb-49d0-bffa-b6d63885a5fb" containerName="console" probeResult="failure" output="Get \"https://10.217.0.27:8443/health\": dial tcp 10.217.0.27:8443: connect: connection refused"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.339687 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-95mmt"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.351992 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-2lzkb"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.391337 4919 generic.go:334] "Generic (PLEG): container finished" podID="48165011-4466-4728-96bc-d6e986584019" containerID="8219c327786b4e41427c2a52a547fb21860aa337849052ec0a1ec43de867c77e" exitCode=0
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.391404 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"48165011-4466-4728-96bc-d6e986584019","Type":"ContainerDied","Data":"8219c327786b4e41427c2a52a547fb21860aa337849052ec0a1ec43de867c77e"}
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.394434 4919 generic.go:334] "Generic (PLEG): container finished" podID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerID="277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61" exitCode=0
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.394604 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jm7dz" event={"ID":"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7","Type":"ContainerDied","Data":"277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61"}
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.394650 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jm7dz" event={"ID":"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7","Type":"ContainerStarted","Data":"623670a2ac6002cf76b8903bd03a28368f37e51a9f10eff001ef2411cfb80dab"}
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.404251 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-xns2c"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.412158 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p6xv9"]
Sep 30 20:16:07 crc kubenswrapper[4919]: W0930 20:16:07.427431 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8339c07_c7d4_4da0_8927_b99887894379.slice/crio-590fbfd1f67fc384fee26f774d8f8dd1afe8c8416c3069af2a563973e92cd36f WatchSource:0}: Error finding container 590fbfd1f67fc384fee26f774d8f8dd1afe8c8416c3069af2a563973e92cd36f: Status 404 returned error can't find the container with id 590fbfd1f67fc384fee26f774d8f8dd1afe8c8416c3069af2a563973e92cd36f
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.483330 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.663621 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-95mmt"]
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.776715 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-c55sj"
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.779320 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 20:16:07 crc kubenswrapper[4919]: [-]has-synced failed: reason withheld
Sep 30 20:16:07 crc kubenswrapper[4919]: [+]process-running ok
Sep 30 20:16:07 crc kubenswrapper[4919]: healthz check failed
Sep 30 20:16:07 crc kubenswrapper[4919]: I0930 20:16:07.779355 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.453262 4919 generic.go:334] "Generic (PLEG): container finished" podID="d8339c07-c7d4-4da0-8927-b99887894379" containerID="214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812" exitCode=0
Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.453359 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6xv9" event={"ID":"d8339c07-c7d4-4da0-8927-b99887894379","Type":"ContainerDied","Data":"214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812"}
Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.453413 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6xv9" event={"ID":"d8339c07-c7d4-4da0-8927-b99887894379","Type":"ContainerStarted","Data":"590fbfd1f67fc384fee26f774d8f8dd1afe8c8416c3069af2a563973e92cd36f"}
Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.472490 4919 generic.go:334] "Generic (PLEG): container finished" podID="9394b462-e717-46a0-b247-57181adb5d6a" containerID="c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d" exitCode=0
Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.473164 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95mmt" event={"ID":"9394b462-e717-46a0-b247-57181adb5d6a","Type":"ContainerDied","Data":"c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d"}
Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.473205 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95mmt" event={"ID":"9394b462-e717-46a0-b247-57181adb5d6a","Type":"ContainerStarted","Data":"b7382558a3a215264dd7afe758038aa5e38e2a0e20249ebc30e743241bc51f40"}
Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.789601 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 30 20:16:08 crc kubenswrapper[4919]: [-]has-synced failed: reason withheld
Sep 30 20:16:08 crc kubenswrapper[4919]: [+]process-running ok
Sep 30 20:16:08 crc kubenswrapper[4919]: healthz check failed
Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.789891 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
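These repeated failures are tolerated because they come from a startupProbe: the kubelet keeps probing without restarting the container until either the probe succeeds (as it does once has-synced flips to ok below) or the probe's failure budget of failureThreshold x periodSeconds runs out. A sketch of a comparable probe definition with client-go types; the path, port, and thresholds are illustrative assumptions, not the router's real settings:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// Shape of a startup probe like the router's; values are hypothetical.
	probe := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path: "/healthz/ready",
				Port: intstr.FromInt(1936),
			},
		},
		PeriodSeconds:    1,
		FailureThreshold: 120, // keep retrying ~2 minutes before giving up and restarting
	}
	fmt.Printf("startupProbe: %+v\n", probe)
}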
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.930727 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48165011-4466-4728-96bc-d6e986584019-kubelet-dir\") pod \"48165011-4466-4728-96bc-d6e986584019\" (UID: \"48165011-4466-4728-96bc-d6e986584019\") " Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.930812 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48165011-4466-4728-96bc-d6e986584019-kube-api-access\") pod \"48165011-4466-4728-96bc-d6e986584019\" (UID: \"48165011-4466-4728-96bc-d6e986584019\") " Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.933319 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/48165011-4466-4728-96bc-d6e986584019-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "48165011-4466-4728-96bc-d6e986584019" (UID: "48165011-4466-4728-96bc-d6e986584019"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:16:08 crc kubenswrapper[4919]: I0930 20:16:08.952680 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48165011-4466-4728-96bc-d6e986584019-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "48165011-4466-4728-96bc-d6e986584019" (UID: "48165011-4466-4728-96bc-d6e986584019"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.032992 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48165011-4466-4728-96bc-d6e986584019-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.033025 4919 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48165011-4466-4728-96bc-d6e986584019-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.495273 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.495665 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"48165011-4466-4728-96bc-d6e986584019","Type":"ContainerDied","Data":"eeba91fde82ced0667631d6ae450921e6b36b32c6c46d72aa9895c934ce8bf1e"} Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.495693 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eeba91fde82ced0667631d6ae450921e6b36b32c6c46d72aa9895c934ce8bf1e" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.779276 4919 patch_prober.go:28] interesting pod/router-default-5444994796-c55sj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 30 20:16:09 crc kubenswrapper[4919]: [+]has-synced ok Sep 30 20:16:09 crc kubenswrapper[4919]: [+]process-running ok Sep 30 20:16:09 crc kubenswrapper[4919]: healthz check failed Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.779325 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-c55sj" podUID="c019c27b-0131-498d-b84a-6c79511d176e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.989411 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 20:16:09 crc kubenswrapper[4919]: E0930 20:16:09.990300 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48165011-4466-4728-96bc-d6e986584019" containerName="pruner" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.990317 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="48165011-4466-4728-96bc-d6e986584019" containerName="pruner" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.990530 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="48165011-4466-4728-96bc-d6e986584019" containerName="pruner" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.991172 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.994067 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.994174 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 20:16:09 crc kubenswrapper[4919]: I0930 20:16:09.994570 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.045847 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.046462 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.155178 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.155284 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.155483 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.177718 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.321408 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.781518 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:16:10 crc kubenswrapper[4919]: I0930 20:16:10.787204 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-c55sj" Sep 30 20:16:12 crc kubenswrapper[4919]: I0930 20:16:12.620440 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-hj49j" Sep 30 20:16:16 crc kubenswrapper[4919]: I0930 20:16:16.936788 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-2d7ng" Sep 30 20:16:17 crc kubenswrapper[4919]: I0930 20:16:17.304625 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:16:17 crc kubenswrapper[4919]: I0930 20:16:17.307965 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:16:19 crc kubenswrapper[4919]: I0930 20:16:19.905301 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:16:19 crc kubenswrapper[4919]: I0930 20:16:19.915461 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c0624d31-70fc-4d66-a31b-4e67896ab40e-metrics-certs\") pod \"network-metrics-daemon-bwpdf\" (UID: \"c0624d31-70fc-4d66-a31b-4e67896ab40e\") " pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:16:19 crc kubenswrapper[4919]: I0930 20:16:19.984574 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-bwpdf" Sep 30 20:16:25 crc kubenswrapper[4919]: I0930 20:16:25.174984 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:16:26 crc kubenswrapper[4919]: I0930 20:16:26.062263 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:16:26 crc kubenswrapper[4919]: I0930 20:16:26.062348 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:16:28 crc kubenswrapper[4919]: I0930 20:16:28.871754 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 30 20:16:29 crc kubenswrapper[4919]: E0930 20:16:29.574422 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 30 20:16:29 crc kubenswrapper[4919]: E0930 20:16:29.574575 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6n2x9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-98qcl_openshift-marketplace(82021dd4-fbb8-4832-a38d-cd00aa9d786b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 20:16:29 crc kubenswrapper[4919]: E0930 20:16:29.575774 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-98qcl" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" Sep 30 20:16:29 crc kubenswrapper[4919]: E0930 20:16:29.651308 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 30 20:16:29 crc kubenswrapper[4919]: E0930 20:16:29.651444 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cnbqx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-jm7dz_openshift-marketplace(ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 20:16:29 crc kubenswrapper[4919]: E0930 20:16:29.652613 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-jm7dz" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" Sep 30 20:16:32 crc kubenswrapper[4919]: E0930 20:16:32.347058 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 30 20:16:32 crc kubenswrapper[4919]: E0930 20:16:32.348621 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qfv7k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-p6xv9_openshift-marketplace(d8339c07-c7d4-4da0-8927-b99887894379): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 20:16:32 crc kubenswrapper[4919]: E0930 20:16:32.350130 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-p6xv9" podUID="d8339c07-c7d4-4da0-8927-b99887894379" Sep 30 20:16:33 crc kubenswrapper[4919]: E0930 20:16:33.646007 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 30 20:16:33 crc kubenswrapper[4919]: E0930 20:16:33.646497 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5dw4f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7grc2_openshift-marketplace(14a021e6-225d-498c-aa4c-008e2ad9580d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 20:16:33 crc kubenswrapper[4919]: E0930 20:16:33.647682 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7grc2" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.643689 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-p6xv9" podUID="d8339c07-c7d4-4da0-8927-b99887894379" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.643794 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-jm7dz" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.643861 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-98qcl" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.643904 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7grc2" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" Sep 30 20:16:34 
crc kubenswrapper[4919]: I0930 20:16:34.735904 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3","Type":"ContainerStarted","Data":"d03c52bf20667dc8204f40f2084a1cc40e0405adf5f770c3df9a94d025c62836"} Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.745431 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.745564 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7vwg2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-ddzjp_openshift-marketplace(dcb560ec-da35-4b84-86f5-e56a181c4194): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.746913 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-ddzjp" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.791318 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.791476 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4jlzp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-54vv5_openshift-marketplace(f4ca8312-ff90-418e-8503-8acc3f9d63b1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.793205 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-54vv5" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.805106 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.805300 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d726b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7f7p6_openshift-marketplace(283bf417-5302-4743-8a44-76fa61eba04b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 20:16:34 crc kubenswrapper[4919]: E0930 20:16:34.807355 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7f7p6" podUID="283bf417-5302-4743-8a44-76fa61eba04b" Sep 30 20:16:35 crc kubenswrapper[4919]: I0930 20:16:35.106447 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-bwpdf"] Sep 30 20:16:35 crc kubenswrapper[4919]: W0930 20:16:35.119978 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0624d31_70fc_4d66_a31b_4e67896ab40e.slice/crio-2577d4b442880b3a04b92393b584645f57b0d3816c2011b583773fc05cc62636 WatchSource:0}: Error finding container 2577d4b442880b3a04b92393b584645f57b0d3816c2011b583773fc05cc62636: Status 404 returned error can't find the container with id 2577d4b442880b3a04b92393b584645f57b0d3816c2011b583773fc05cc62636 Sep 30 20:16:35 crc kubenswrapper[4919]: I0930 20:16:35.741695 4919 generic.go:334] "Generic (PLEG): container finished" podID="9394b462-e717-46a0-b247-57181adb5d6a" containerID="7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d" exitCode=0 Sep 30 20:16:35 crc kubenswrapper[4919]: I0930 20:16:35.741765 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95mmt" event={"ID":"9394b462-e717-46a0-b247-57181adb5d6a","Type":"ContainerDied","Data":"7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d"} Sep 30 20:16:35 crc kubenswrapper[4919]: I0930 20:16:35.744436 4919 generic.go:334] "Generic (PLEG): container finished" podID="9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3" containerID="f05869d30cbc8cf78c880adad3d7cda4eb29373ef3e21b8eabcf1bc593eeb9ad" exitCode=0 Sep 30 20:16:35 crc kubenswrapper[4919]: 
I0930 20:16:35.744499 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3","Type":"ContainerDied","Data":"f05869d30cbc8cf78c880adad3d7cda4eb29373ef3e21b8eabcf1bc593eeb9ad"} Sep 30 20:16:35 crc kubenswrapper[4919]: I0930 20:16:35.747640 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" event={"ID":"c0624d31-70fc-4d66-a31b-4e67896ab40e","Type":"ContainerStarted","Data":"2ec980094f7200d11a3338ed0f5bf203cd2f4ee622a6b113f333f8aded2e05f3"} Sep 30 20:16:35 crc kubenswrapper[4919]: I0930 20:16:35.747659 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" event={"ID":"c0624d31-70fc-4d66-a31b-4e67896ab40e","Type":"ContainerStarted","Data":"4bca04e1528ab143318f2bacbf232136b6a4bca96ee4d6ce937c004412b9a3c9"} Sep 30 20:16:35 crc kubenswrapper[4919]: I0930 20:16:35.747669 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-bwpdf" event={"ID":"c0624d31-70fc-4d66-a31b-4e67896ab40e","Type":"ContainerStarted","Data":"2577d4b442880b3a04b92393b584645f57b0d3816c2011b583773fc05cc62636"} Sep 30 20:16:35 crc kubenswrapper[4919]: E0930 20:16:35.748642 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7f7p6" podUID="283bf417-5302-4743-8a44-76fa61eba04b" Sep 30 20:16:35 crc kubenswrapper[4919]: E0930 20:16:35.749482 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-ddzjp" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" Sep 30 20:16:35 crc kubenswrapper[4919]: E0930 20:16:35.749552 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-54vv5" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" Sep 30 20:16:35 crc kubenswrapper[4919]: I0930 20:16:35.793566 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-bwpdf" podStartSLOduration=158.793550897 podStartE2EDuration="2m38.793550897s" podCreationTimestamp="2025-09-30 20:13:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:16:35.793035983 +0000 UTC m=+180.909069130" watchObservedRunningTime="2025-09-30 20:16:35.793550897 +0000 UTC m=+180.909584034" Sep 30 20:16:36 crc kubenswrapper[4919]: I0930 20:16:36.758760 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95mmt" event={"ID":"9394b462-e717-46a0-b247-57181adb5d6a","Type":"ContainerStarted","Data":"f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9"} Sep 30 20:16:36 crc kubenswrapper[4919]: I0930 20:16:36.995500 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.012000 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-95mmt" podStartSLOduration=3.146646434 podStartE2EDuration="31.011981938s" podCreationTimestamp="2025-09-30 20:16:06 +0000 UTC" firstStartedPulling="2025-09-30 20:16:08.47715171 +0000 UTC m=+153.593184837" lastFinishedPulling="2025-09-30 20:16:36.342487184 +0000 UTC m=+181.458520341" observedRunningTime="2025-09-30 20:16:36.78764439 +0000 UTC m=+181.903677577" watchObservedRunningTime="2025-09-30 20:16:37.011981938 +0000 UTC m=+182.128015065" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.145263 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kubelet-dir\") pod \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\" (UID: \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\") " Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.145707 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kube-api-access\") pod \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\" (UID: \"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3\") " Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.145397 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3" (UID: "9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.145982 4919 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.155368 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3" (UID: "9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.247205 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.340228 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-95mmt" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.340282 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-95mmt" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.767119 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3","Type":"ContainerDied","Data":"d03c52bf20667dc8204f40f2084a1cc40e0405adf5f770c3df9a94d025c62836"} Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.767162 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d03c52bf20667dc8204f40f2084a1cc40e0405adf5f770c3df9a94d025c62836" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.767138 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 30 20:16:37 crc kubenswrapper[4919]: I0930 20:16:37.846780 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-2b2p5" Sep 30 20:16:38 crc kubenswrapper[4919]: I0930 20:16:38.481616 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-95mmt" podUID="9394b462-e717-46a0-b247-57181adb5d6a" containerName="registry-server" probeResult="failure" output=< Sep 30 20:16:38 crc kubenswrapper[4919]: timeout: failed to connect service ":50051" within 1s Sep 30 20:16:38 crc kubenswrapper[4919]: > Sep 30 20:16:43 crc kubenswrapper[4919]: I0930 20:16:43.467762 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 30 20:16:47 crc kubenswrapper[4919]: I0930 20:16:47.390231 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-95mmt" Sep 30 20:16:47 crc kubenswrapper[4919]: I0930 20:16:47.443667 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-95mmt" Sep 30 20:16:47 crc kubenswrapper[4919]: I0930 20:16:47.836847 4919 generic.go:334] "Generic (PLEG): container finished" podID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerID="09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c" exitCode=0 Sep 30 20:16:47 crc kubenswrapper[4919]: I0930 20:16:47.836909 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jm7dz" event={"ID":"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7","Type":"ContainerDied","Data":"09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c"} Sep 30 20:16:48 crc kubenswrapper[4919]: I0930 20:16:48.088170 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-95mmt"] Sep 30 20:16:48 crc kubenswrapper[4919]: I0930 20:16:48.851112 4919 generic.go:334] "Generic (PLEG): container finished" 
podID="d8339c07-c7d4-4da0-8927-b99887894379" containerID="aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1" exitCode=0 Sep 30 20:16:48 crc kubenswrapper[4919]: I0930 20:16:48.851394 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6xv9" event={"ID":"d8339c07-c7d4-4da0-8927-b99887894379","Type":"ContainerDied","Data":"aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1"} Sep 30 20:16:48 crc kubenswrapper[4919]: I0930 20:16:48.865207 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jm7dz" event={"ID":"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7","Type":"ContainerStarted","Data":"ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a"} Sep 30 20:16:48 crc kubenswrapper[4919]: I0930 20:16:48.865467 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-95mmt" podUID="9394b462-e717-46a0-b247-57181adb5d6a" containerName="registry-server" containerID="cri-o://f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9" gracePeriod=2 Sep 30 20:16:48 crc kubenswrapper[4919]: I0930 20:16:48.915967 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jm7dz" podStartSLOduration=2.87072112 podStartE2EDuration="43.915947453s" podCreationTimestamp="2025-09-30 20:16:05 +0000 UTC" firstStartedPulling="2025-09-30 20:16:07.402749745 +0000 UTC m=+152.518782872" lastFinishedPulling="2025-09-30 20:16:48.447976078 +0000 UTC m=+193.564009205" observedRunningTime="2025-09-30 20:16:48.913999837 +0000 UTC m=+194.030032974" watchObservedRunningTime="2025-09-30 20:16:48.915947453 +0000 UTC m=+194.031980580" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.329857 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-95mmt" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.439829 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xxqj\" (UniqueName: \"kubernetes.io/projected/9394b462-e717-46a0-b247-57181adb5d6a-kube-api-access-2xxqj\") pod \"9394b462-e717-46a0-b247-57181adb5d6a\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.439999 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-utilities\") pod \"9394b462-e717-46a0-b247-57181adb5d6a\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.440042 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-catalog-content\") pod \"9394b462-e717-46a0-b247-57181adb5d6a\" (UID: \"9394b462-e717-46a0-b247-57181adb5d6a\") " Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.441027 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-utilities" (OuterVolumeSpecName: "utilities") pod "9394b462-e717-46a0-b247-57181adb5d6a" (UID: "9394b462-e717-46a0-b247-57181adb5d6a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.445878 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9394b462-e717-46a0-b247-57181adb5d6a-kube-api-access-2xxqj" (OuterVolumeSpecName: "kube-api-access-2xxqj") pod "9394b462-e717-46a0-b247-57181adb5d6a" (UID: "9394b462-e717-46a0-b247-57181adb5d6a"). InnerVolumeSpecName "kube-api-access-2xxqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.541384 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.541419 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xxqj\" (UniqueName: \"kubernetes.io/projected/9394b462-e717-46a0-b247-57181adb5d6a-kube-api-access-2xxqj\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.542656 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9394b462-e717-46a0-b247-57181adb5d6a" (UID: "9394b462-e717-46a0-b247-57181adb5d6a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.643466 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9394b462-e717-46a0-b247-57181adb5d6a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.879577 4919 generic.go:334] "Generic (PLEG): container finished" podID="283bf417-5302-4743-8a44-76fa61eba04b" containerID="22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa" exitCode=0 Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.879644 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f7p6" event={"ID":"283bf417-5302-4743-8a44-76fa61eba04b","Type":"ContainerDied","Data":"22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa"} Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.884654 4919 generic.go:334] "Generic (PLEG): container finished" podID="9394b462-e717-46a0-b247-57181adb5d6a" containerID="f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9" exitCode=0 Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.884705 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95mmt" event={"ID":"9394b462-e717-46a0-b247-57181adb5d6a","Type":"ContainerDied","Data":"f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9"} Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.884753 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-95mmt" event={"ID":"9394b462-e717-46a0-b247-57181adb5d6a","Type":"ContainerDied","Data":"b7382558a3a215264dd7afe758038aa5e38e2a0e20249ebc30e743241bc51f40"} Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.884780 4919 scope.go:117] "RemoveContainer" containerID="f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.884997 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-95mmt" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.933759 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-95mmt"] Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.936317 4919 scope.go:117] "RemoveContainer" containerID="7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d" Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.937551 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-95mmt"] Sep 30 20:16:49 crc kubenswrapper[4919]: I0930 20:16:49.962695 4919 scope.go:117] "RemoveContainer" containerID="c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d" Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.045025 4919 scope.go:117] "RemoveContainer" containerID="f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9" Sep 30 20:16:50 crc kubenswrapper[4919]: E0930 20:16:50.045823 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9\": container with ID starting with f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9 not found: ID does not exist" containerID="f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9" Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.045881 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9"} err="failed to get container status \"f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9\": rpc error: code = NotFound desc = could not find container \"f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9\": container with ID starting with f06b2c5f7e140b762f6357f9b425fc6eab1c441a7b3dd30e39819bd640a1f9c9 not found: ID does not exist" Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.045940 4919 scope.go:117] "RemoveContainer" containerID="7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d" Sep 30 20:16:50 crc kubenswrapper[4919]: E0930 20:16:50.046538 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d\": container with ID starting with 7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d not found: ID does not exist" containerID="7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d" Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.046593 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d"} err="failed to get container status \"7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d\": rpc error: code = NotFound desc = could not find container \"7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d\": container with ID starting with 7950fe638ef4bd2a963370f883d5b5bdd044e3fba50919d01348a3a548aced8d not found: ID does not exist" Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.046627 4919 scope.go:117] "RemoveContainer" containerID="c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d" Sep 30 20:16:50 crc kubenswrapper[4919]: E0930 20:16:50.047836 4919 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d\": container with ID starting with c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d not found: ID does not exist" containerID="c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d" Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.047861 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d"} err="failed to get container status \"c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d\": rpc error: code = NotFound desc = could not find container \"c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d\": container with ID starting with c49f49b408cc9b03fe80735dcdf4eb1d18c8328f62c0189431f5f67f9411f12d not found: ID does not exist" Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.892194 4919 generic.go:334] "Generic (PLEG): container finished" podID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerID="249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1" exitCode=0 Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.892318 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54vv5" event={"ID":"f4ca8312-ff90-418e-8503-8acc3f9d63b1","Type":"ContainerDied","Data":"249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1"} Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.896441 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6xv9" event={"ID":"d8339c07-c7d4-4da0-8927-b99887894379","Type":"ContainerStarted","Data":"17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d"} Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.899404 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ddzjp" event={"ID":"dcb560ec-da35-4b84-86f5-e56a181c4194","Type":"ContainerStarted","Data":"77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0"} Sep 30 20:16:50 crc kubenswrapper[4919]: I0930 20:16:50.946722 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p6xv9" podStartSLOduration=3.687450806 podStartE2EDuration="44.946699939s" podCreationTimestamp="2025-09-30 20:16:06 +0000 UTC" firstStartedPulling="2025-09-30 20:16:08.457396549 +0000 UTC m=+153.573429676" lastFinishedPulling="2025-09-30 20:16:49.716645682 +0000 UTC m=+194.832678809" observedRunningTime="2025-09-30 20:16:50.93882017 +0000 UTC m=+196.054853307" watchObservedRunningTime="2025-09-30 20:16:50.946699939 +0000 UTC m=+196.062733066" Sep 30 20:16:51 crc kubenswrapper[4919]: I0930 20:16:51.641831 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9394b462-e717-46a0-b247-57181adb5d6a" path="/var/lib/kubelet/pods/9394b462-e717-46a0-b247-57181adb5d6a/volumes" Sep 30 20:16:51 crc kubenswrapper[4919]: I0930 20:16:51.905313 4919 generic.go:334] "Generic (PLEG): container finished" podID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerID="77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0" exitCode=0 Sep 30 20:16:51 crc kubenswrapper[4919]: I0930 20:16:51.905375 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ddzjp" 
event={"ID":"dcb560ec-da35-4b84-86f5-e56a181c4194","Type":"ContainerDied","Data":"77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0"} Sep 30 20:16:51 crc kubenswrapper[4919]: I0930 20:16:51.908055 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f7p6" event={"ID":"283bf417-5302-4743-8a44-76fa61eba04b","Type":"ContainerStarted","Data":"c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa"} Sep 30 20:16:51 crc kubenswrapper[4919]: I0930 20:16:51.935137 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7f7p6" podStartSLOduration=3.25526433 podStartE2EDuration="48.935117628s" podCreationTimestamp="2025-09-30 20:16:03 +0000 UTC" firstStartedPulling="2025-09-30 20:16:05.304535457 +0000 UTC m=+150.420568594" lastFinishedPulling="2025-09-30 20:16:50.984388765 +0000 UTC m=+196.100421892" observedRunningTime="2025-09-30 20:16:51.933981293 +0000 UTC m=+197.050014430" watchObservedRunningTime="2025-09-30 20:16:51.935117628 +0000 UTC m=+197.051150755" Sep 30 20:16:53 crc kubenswrapper[4919]: I0930 20:16:53.711864 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:53 crc kubenswrapper[4919]: I0930 20:16:53.711952 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:53 crc kubenswrapper[4919]: I0930 20:16:53.769535 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.061917 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.062421 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.085650 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jm7dz" Sep 30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.085721 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jm7dz" Sep 30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.153582 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jm7dz" Sep 30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.902416 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p6xv9" Sep 30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.902773 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p6xv9" Sep 30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.951384 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p6xv9" Sep 
30 20:16:56 crc kubenswrapper[4919]: I0930 20:16:56.997915 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jm7dz" Sep 30 20:16:57 crc kubenswrapper[4919]: I0930 20:16:57.009663 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p6xv9" Sep 30 20:16:57 crc kubenswrapper[4919]: I0930 20:16:57.947764 4919 generic.go:334] "Generic (PLEG): container finished" podID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerID="3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad" exitCode=0 Sep 30 20:16:57 crc kubenswrapper[4919]: I0930 20:16:57.947914 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98qcl" event={"ID":"82021dd4-fbb8-4832-a38d-cd00aa9d786b","Type":"ContainerDied","Data":"3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad"} Sep 30 20:16:57 crc kubenswrapper[4919]: I0930 20:16:57.953594 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54vv5" event={"ID":"f4ca8312-ff90-418e-8503-8acc3f9d63b1","Type":"ContainerStarted","Data":"113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d"} Sep 30 20:16:57 crc kubenswrapper[4919]: I0930 20:16:57.959151 4919 generic.go:334] "Generic (PLEG): container finished" podID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerID="1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7" exitCode=0 Sep 30 20:16:57 crc kubenswrapper[4919]: I0930 20:16:57.959286 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7grc2" event={"ID":"14a021e6-225d-498c-aa4c-008e2ad9580d","Type":"ContainerDied","Data":"1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7"} Sep 30 20:16:57 crc kubenswrapper[4919]: I0930 20:16:57.970589 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ddzjp" event={"ID":"dcb560ec-da35-4b84-86f5-e56a181c4194","Type":"ContainerStarted","Data":"512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37"} Sep 30 20:16:58 crc kubenswrapper[4919]: I0930 20:16:58.003052 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ddzjp" podStartSLOduration=3.706567154 podStartE2EDuration="55.00303263s" podCreationTimestamp="2025-09-30 20:16:03 +0000 UTC" firstStartedPulling="2025-09-30 20:16:05.293319072 +0000 UTC m=+150.409352199" lastFinishedPulling="2025-09-30 20:16:56.589784508 +0000 UTC m=+201.705817675" observedRunningTime="2025-09-30 20:16:58.00037644 +0000 UTC m=+203.116409567" watchObservedRunningTime="2025-09-30 20:16:58.00303263 +0000 UTC m=+203.119065757" Sep 30 20:16:58 crc kubenswrapper[4919]: I0930 20:16:58.481236 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jm7dz"] Sep 30 20:16:58 crc kubenswrapper[4919]: I0930 20:16:58.978468 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7grc2" event={"ID":"14a021e6-225d-498c-aa4c-008e2ad9580d","Type":"ContainerStarted","Data":"9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee"} Sep 30 20:16:58 crc kubenswrapper[4919]: I0930 20:16:58.982794 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98qcl" 
event={"ID":"82021dd4-fbb8-4832-a38d-cd00aa9d786b","Type":"ContainerStarted","Data":"d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232"} Sep 30 20:16:58 crc kubenswrapper[4919]: I0930 20:16:58.983405 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jm7dz" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerName="registry-server" containerID="cri-o://ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a" gracePeriod=2 Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.008949 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7grc2" podStartSLOduration=2.680115822 podStartE2EDuration="56.008925086s" podCreationTimestamp="2025-09-30 20:16:03 +0000 UTC" firstStartedPulling="2025-09-30 20:16:05.310811928 +0000 UTC m=+150.426845055" lastFinishedPulling="2025-09-30 20:16:58.639621192 +0000 UTC m=+203.755654319" observedRunningTime="2025-09-30 20:16:59.007267349 +0000 UTC m=+204.123300466" watchObservedRunningTime="2025-09-30 20:16:59.008925086 +0000 UTC m=+204.124958213" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.031981 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-98qcl" podStartSLOduration=2.019923082 podStartE2EDuration="54.031963829s" podCreationTimestamp="2025-09-30 20:16:05 +0000 UTC" firstStartedPulling="2025-09-30 20:16:06.360824129 +0000 UTC m=+151.476857256" lastFinishedPulling="2025-09-30 20:16:58.372864876 +0000 UTC m=+203.488898003" observedRunningTime="2025-09-30 20:16:59.030845864 +0000 UTC m=+204.146878991" watchObservedRunningTime="2025-09-30 20:16:59.031963829 +0000 UTC m=+204.147996956" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.051642 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-54vv5" podStartSLOduration=4.884748307 podStartE2EDuration="56.051624546s" podCreationTimestamp="2025-09-30 20:16:03 +0000 UTC" firstStartedPulling="2025-09-30 20:16:05.305422262 +0000 UTC m=+150.421455409" lastFinishedPulling="2025-09-30 20:16:56.472298481 +0000 UTC m=+201.588331648" observedRunningTime="2025-09-30 20:16:59.048443773 +0000 UTC m=+204.164476890" watchObservedRunningTime="2025-09-30 20:16:59.051624546 +0000 UTC m=+204.167657673" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.388006 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jm7dz" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.404331 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-catalog-content\") pod \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.404381 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-utilities\") pod \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.404429 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnbqx\" (UniqueName: \"kubernetes.io/projected/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-kube-api-access-cnbqx\") pod \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\" (UID: \"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7\") " Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.405151 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-utilities" (OuterVolumeSpecName: "utilities") pod "ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" (UID: "ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.417290 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" (UID: "ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.417722 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-kube-api-access-cnbqx" (OuterVolumeSpecName: "kube-api-access-cnbqx") pod "ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" (UID: "ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7"). InnerVolumeSpecName "kube-api-access-cnbqx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.505920 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.505959 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnbqx\" (UniqueName: \"kubernetes.io/projected/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-kube-api-access-cnbqx\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.505972 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.993250 4919 generic.go:334] "Generic (PLEG): container finished" podID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerID="ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a" exitCode=0 Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.993460 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jm7dz" event={"ID":"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7","Type":"ContainerDied","Data":"ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a"} Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.993866 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jm7dz" event={"ID":"ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7","Type":"ContainerDied","Data":"623670a2ac6002cf76b8903bd03a28368f37e51a9f10eff001ef2411cfb80dab"} Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.993902 4919 scope.go:117] "RemoveContainer" containerID="ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a" Sep 30 20:16:59 crc kubenswrapper[4919]: I0930 20:16:59.993584 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jm7dz" Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.027534 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jm7dz"] Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.033805 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jm7dz"] Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.034920 4919 scope.go:117] "RemoveContainer" containerID="09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c" Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.059324 4919 scope.go:117] "RemoveContainer" containerID="277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61" Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.088010 4919 scope.go:117] "RemoveContainer" containerID="ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a" Sep 30 20:17:00 crc kubenswrapper[4919]: E0930 20:17:00.088616 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a\": container with ID starting with ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a not found: ID does not exist" containerID="ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a" Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.088666 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a"} err="failed to get container status \"ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a\": rpc error: code = NotFound desc = could not find container \"ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a\": container with ID starting with ed55184ab7d4376b4e68c5854bc6baeab8ba363adb43f5682778abf292982a5a not found: ID does not exist" Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.088702 4919 scope.go:117] "RemoveContainer" containerID="09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c" Sep 30 20:17:00 crc kubenswrapper[4919]: E0930 20:17:00.089028 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c\": container with ID starting with 09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c not found: ID does not exist" containerID="09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c" Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.089065 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c"} err="failed to get container status \"09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c\": rpc error: code = NotFound desc = could not find container \"09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c\": container with ID starting with 09de5333ab861b12c7caf1a24e0d8edec47e23efc8414d92d4c76ff5b6b5535c not found: ID does not exist" Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.089093 4919 scope.go:117] "RemoveContainer" containerID="277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61" Sep 30 20:17:00 crc kubenswrapper[4919]: E0930 20:17:00.089644 4919 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61\": container with ID starting with 277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61 not found: ID does not exist" containerID="277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61" Sep 30 20:17:00 crc kubenswrapper[4919]: I0930 20:17:00.089666 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61"} err="failed to get container status \"277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61\": rpc error: code = NotFound desc = could not find container \"277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61\": container with ID starting with 277a35ea4af9c957da4a931c73eee80e334a343993f3522db39510791c31ba61 not found: ID does not exist" Sep 30 20:17:01 crc kubenswrapper[4919]: I0930 20:17:01.640336 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" path="/var/lib/kubelet/pods/ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7/volumes" Sep 30 20:17:03 crc kubenswrapper[4919]: I0930 20:17:03.769973 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7f7p6" Sep 30 20:17:03 crc kubenswrapper[4919]: I0930 20:17:03.881498 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:17:03 crc kubenswrapper[4919]: I0930 20:17:03.881555 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:17:03 crc kubenswrapper[4919]: I0930 20:17:03.930189 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:17:04 crc kubenswrapper[4919]: I0930 20:17:04.084188 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-54vv5" Sep 30 20:17:04 crc kubenswrapper[4919]: I0930 20:17:04.298755 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:17:04 crc kubenswrapper[4919]: I0930 20:17:04.298822 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:17:04 crc kubenswrapper[4919]: I0930 20:17:04.353957 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:17:04 crc kubenswrapper[4919]: I0930 20:17:04.391423 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:17:04 crc kubenswrapper[4919]: I0930 20:17:04.391476 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:17:04 crc kubenswrapper[4919]: I0930 20:17:04.445950 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:17:05 crc kubenswrapper[4919]: I0930 20:17:05.077258 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:17:05 crc kubenswrapper[4919]: I0930 20:17:05.080321 4919 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:17:05 crc kubenswrapper[4919]: I0930 20:17:05.742502 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-98qcl" Sep 30 20:17:05 crc kubenswrapper[4919]: I0930 20:17:05.742616 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-98qcl" Sep 30 20:17:05 crc kubenswrapper[4919]: I0930 20:17:05.782755 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-98qcl" Sep 30 20:17:06 crc kubenswrapper[4919]: I0930 20:17:06.088738 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-98qcl" Sep 30 20:17:06 crc kubenswrapper[4919]: I0930 20:17:06.282920 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ddzjp"] Sep 30 20:17:06 crc kubenswrapper[4919]: I0930 20:17:06.484872 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7grc2"] Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.047775 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ddzjp" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerName="registry-server" containerID="cri-o://512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37" gracePeriod=2 Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.048318 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7grc2" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerName="registry-server" containerID="cri-o://9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee" gracePeriod=2 Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.436852 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.442729 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.631814 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-catalog-content\") pod \"dcb560ec-da35-4b84-86f5-e56a181c4194\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.632190 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dw4f\" (UniqueName: \"kubernetes.io/projected/14a021e6-225d-498c-aa4c-008e2ad9580d-kube-api-access-5dw4f\") pod \"14a021e6-225d-498c-aa4c-008e2ad9580d\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.632242 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-catalog-content\") pod \"14a021e6-225d-498c-aa4c-008e2ad9580d\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.632261 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-utilities\") pod \"14a021e6-225d-498c-aa4c-008e2ad9580d\" (UID: \"14a021e6-225d-498c-aa4c-008e2ad9580d\") " Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.632315 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-utilities\") pod \"dcb560ec-da35-4b84-86f5-e56a181c4194\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.632357 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vwg2\" (UniqueName: \"kubernetes.io/projected/dcb560ec-da35-4b84-86f5-e56a181c4194-kube-api-access-7vwg2\") pod \"dcb560ec-da35-4b84-86f5-e56a181c4194\" (UID: \"dcb560ec-da35-4b84-86f5-e56a181c4194\") " Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.633248 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-utilities" (OuterVolumeSpecName: "utilities") pod "14a021e6-225d-498c-aa4c-008e2ad9580d" (UID: "14a021e6-225d-498c-aa4c-008e2ad9580d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.634689 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-utilities" (OuterVolumeSpecName: "utilities") pod "dcb560ec-da35-4b84-86f5-e56a181c4194" (UID: "dcb560ec-da35-4b84-86f5-e56a181c4194"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.637916 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a021e6-225d-498c-aa4c-008e2ad9580d-kube-api-access-5dw4f" (OuterVolumeSpecName: "kube-api-access-5dw4f") pod "14a021e6-225d-498c-aa4c-008e2ad9580d" (UID: "14a021e6-225d-498c-aa4c-008e2ad9580d"). InnerVolumeSpecName "kube-api-access-5dw4f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.638354 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcb560ec-da35-4b84-86f5-e56a181c4194-kube-api-access-7vwg2" (OuterVolumeSpecName: "kube-api-access-7vwg2") pod "dcb560ec-da35-4b84-86f5-e56a181c4194" (UID: "dcb560ec-da35-4b84-86f5-e56a181c4194"). InnerVolumeSpecName "kube-api-access-7vwg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.691494 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dcb560ec-da35-4b84-86f5-e56a181c4194" (UID: "dcb560ec-da35-4b84-86f5-e56a181c4194"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.692509 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14a021e6-225d-498c-aa4c-008e2ad9580d" (UID: "14a021e6-225d-498c-aa4c-008e2ad9580d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.733963 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dw4f\" (UniqueName: \"kubernetes.io/projected/14a021e6-225d-498c-aa4c-008e2ad9580d-kube-api-access-5dw4f\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.733997 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.734010 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14a021e6-225d-498c-aa4c-008e2ad9580d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.734022 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.734034 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vwg2\" (UniqueName: \"kubernetes.io/projected/dcb560ec-da35-4b84-86f5-e56a181c4194-kube-api-access-7vwg2\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:07 crc kubenswrapper[4919]: I0930 20:17:07.734045 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dcb560ec-da35-4b84-86f5-e56a181c4194-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.054114 4919 generic.go:334] "Generic (PLEG): container finished" podID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerID="9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee" exitCode=0 Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.054172 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7grc2" 
event={"ID":"14a021e6-225d-498c-aa4c-008e2ad9580d","Type":"ContainerDied","Data":"9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee"} Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.054198 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7grc2" event={"ID":"14a021e6-225d-498c-aa4c-008e2ad9580d","Type":"ContainerDied","Data":"4baf8f5319c587771da80b103636a906e40a82e036e4ef62c50a78dd7a053f2b"} Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.054250 4919 scope.go:117] "RemoveContainer" containerID="9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.054358 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7grc2" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.059821 4919 generic.go:334] "Generic (PLEG): container finished" podID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerID="512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37" exitCode=0 Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.059883 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ddzjp" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.059923 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ddzjp" event={"ID":"dcb560ec-da35-4b84-86f5-e56a181c4194","Type":"ContainerDied","Data":"512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37"} Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.059953 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ddzjp" event={"ID":"dcb560ec-da35-4b84-86f5-e56a181c4194","Type":"ContainerDied","Data":"4e10505f3d05c46af6e182463ef346e8bcb28bf19e66324fced0a9fbd0278211"} Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.085388 4919 scope.go:117] "RemoveContainer" containerID="1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.088376 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7grc2"] Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.091608 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7grc2"] Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.105414 4919 scope.go:117] "RemoveContainer" containerID="f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.113999 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ddzjp"] Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.118339 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ddzjp"] Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.139046 4919 scope.go:117] "RemoveContainer" containerID="9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee" Sep 30 20:17:08 crc kubenswrapper[4919]: E0930 20:17:08.139436 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee\": container with ID starting with 9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee not found: ID does not 
exist" containerID="9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.139471 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee"} err="failed to get container status \"9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee\": rpc error: code = NotFound desc = could not find container \"9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee\": container with ID starting with 9616d47267cc5ccddfa22937e148c7f7ee4110f694f580f6f7b278f1e8dae9ee not found: ID does not exist" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.139500 4919 scope.go:117] "RemoveContainer" containerID="1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7" Sep 30 20:17:08 crc kubenswrapper[4919]: E0930 20:17:08.139809 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7\": container with ID starting with 1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7 not found: ID does not exist" containerID="1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.139833 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7"} err="failed to get container status \"1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7\": rpc error: code = NotFound desc = could not find container \"1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7\": container with ID starting with 1b45c32497ae486d7a95bbf2c0024652a26ac9f4c732e95931920e8b9be3aaa7 not found: ID does not exist" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.139846 4919 scope.go:117] "RemoveContainer" containerID="f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22" Sep 30 20:17:08 crc kubenswrapper[4919]: E0930 20:17:08.140099 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22\": container with ID starting with f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22 not found: ID does not exist" containerID="f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.140131 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22"} err="failed to get container status \"f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22\": rpc error: code = NotFound desc = could not find container \"f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22\": container with ID starting with f8b28f397786636190cbc35a6237cea016f54df6584ea0756f45ce0a16b43f22 not found: ID does not exist" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.140312 4919 scope.go:117] "RemoveContainer" containerID="512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.154011 4919 scope.go:117] "RemoveContainer" containerID="77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0" Sep 30 20:17:08 crc kubenswrapper[4919]: 
I0930 20:17:08.168350 4919 scope.go:117] "RemoveContainer" containerID="0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.187352 4919 scope.go:117] "RemoveContainer" containerID="512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37" Sep 30 20:17:08 crc kubenswrapper[4919]: E0930 20:17:08.187847 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37\": container with ID starting with 512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37 not found: ID does not exist" containerID="512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.187882 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37"} err="failed to get container status \"512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37\": rpc error: code = NotFound desc = could not find container \"512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37\": container with ID starting with 512a0361e7984388a53e6025f1c7e8e77517d63125d95bee76858b9a4e31cd37 not found: ID does not exist" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.187911 4919 scope.go:117] "RemoveContainer" containerID="77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0" Sep 30 20:17:08 crc kubenswrapper[4919]: E0930 20:17:08.188324 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0\": container with ID starting with 77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0 not found: ID does not exist" containerID="77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.188346 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0"} err="failed to get container status \"77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0\": rpc error: code = NotFound desc = could not find container \"77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0\": container with ID starting with 77b5120a31eaedf751e5825448904d99e47eb5691c09b537cb8a56bfe6f7a6d0 not found: ID does not exist" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.188359 4919 scope.go:117] "RemoveContainer" containerID="0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404" Sep 30 20:17:08 crc kubenswrapper[4919]: E0930 20:17:08.188634 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404\": container with ID starting with 0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404 not found: ID does not exist" containerID="0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404" Sep 30 20:17:08 crc kubenswrapper[4919]: I0930 20:17:08.188654 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404"} err="failed to get container status 
\"0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404\": rpc error: code = NotFound desc = could not find container \"0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404\": container with ID starting with 0cadd33062ee2c56a1e687c7ee3e223afdad61aec076462b52e9a9f83b8d5404 not found: ID does not exist" Sep 30 20:17:09 crc kubenswrapper[4919]: I0930 20:17:09.638157 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" path="/var/lib/kubelet/pods/14a021e6-225d-498c-aa4c-008e2ad9580d/volumes" Sep 30 20:17:09 crc kubenswrapper[4919]: I0930 20:17:09.639184 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" path="/var/lib/kubelet/pods/dcb560ec-da35-4b84-86f5-e56a181c4194/volumes" Sep 30 20:17:16 crc kubenswrapper[4919]: I0930 20:17:16.002739 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5kls"] Sep 30 20:17:26 crc kubenswrapper[4919]: I0930 20:17:26.062183 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:17:26 crc kubenswrapper[4919]: I0930 20:17:26.062800 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:17:26 crc kubenswrapper[4919]: I0930 20:17:26.062856 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:17:26 crc kubenswrapper[4919]: I0930 20:17:26.063531 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:17:26 crc kubenswrapper[4919]: I0930 20:17:26.063594 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa" gracePeriod=600 Sep 30 20:17:27 crc kubenswrapper[4919]: I0930 20:17:27.176898 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa" exitCode=0 Sep 30 20:17:27 crc kubenswrapper[4919]: I0930 20:17:27.177440 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa"} Sep 30 20:17:27 crc kubenswrapper[4919]: I0930 20:17:27.177470 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"5a35d751e5b4d240d41cc223a5740a55816947b68cfc97f9ad89a3a19385bfe6"} Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.034301 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" podUID="42c8738a-aad8-4cc5-b18f-92eee2745673" containerName="oauth-openshift" containerID="cri-o://1857921e867b9189272ed41ddf38fe806199c5609c64beb36763c520478164d1" gracePeriod=15 Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.282070 4919 generic.go:334] "Generic (PLEG): container finished" podID="42c8738a-aad8-4cc5-b18f-92eee2745673" containerID="1857921e867b9189272ed41ddf38fe806199c5609c64beb36763c520478164d1" exitCode=0 Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.282152 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" event={"ID":"42c8738a-aad8-4cc5-b18f-92eee2745673","Type":"ContainerDied","Data":"1857921e867b9189272ed41ddf38fe806199c5609c64beb36763c520478164d1"} Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.415227 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.453958 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"] Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454480 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454491 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454503 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42c8738a-aad8-4cc5-b18f-92eee2745673" containerName="oauth-openshift" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454509 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="42c8738a-aad8-4cc5-b18f-92eee2745673" containerName="oauth-openshift" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454518 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454524 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454534 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerName="extract-utilities" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454540 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerName="extract-utilities" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454546 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9394b462-e717-46a0-b247-57181adb5d6a" containerName="extract-content" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454552 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9394b462-e717-46a0-b247-57181adb5d6a" 
containerName="extract-content" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454561 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerName="extract-utilities" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454566 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerName="extract-utilities" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454572 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerName="extract-utilities" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454578 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerName="extract-utilities" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454590 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerName="extract-content" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454597 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerName="extract-content" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454606 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9394b462-e717-46a0-b247-57181adb5d6a" containerName="extract-utilities" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454611 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9394b462-e717-46a0-b247-57181adb5d6a" containerName="extract-utilities" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454620 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3" containerName="pruner" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454625 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3" containerName="pruner" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454631 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9394b462-e717-46a0-b247-57181adb5d6a" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454636 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9394b462-e717-46a0-b247-57181adb5d6a" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454644 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454650 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454656 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerName="extract-content" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454661 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerName="extract-content" Sep 30 20:17:41 crc kubenswrapper[4919]: E0930 20:17:41.454668 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerName="extract-content" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454674 4919 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerName="extract-content" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454752 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fbe6b04-0e70-4ea7-ad79-9ce8b6b09bf3" containerName="pruner" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454764 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="42c8738a-aad8-4cc5-b18f-92eee2745673" containerName="oauth-openshift" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454773 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a021e6-225d-498c-aa4c-008e2ad9580d" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454781 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9394b462-e717-46a0-b247-57181adb5d6a" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454789 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddfd62ef-2f8f-4a38-bb6f-6fcba6737fc7" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.454796 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcb560ec-da35-4b84-86f5-e56a181c4194" containerName="registry-server" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.455162 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6" Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.471308 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"] Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495638 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-dir\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495693 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-policies\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495753 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-provider-selection\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495781 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-idp-0-file-data\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495840 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-cliconfig\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") " Sep 
30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495869 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-service-ca\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495891 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-ocp-branding-template\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495916 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-error\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495948 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-router-certs\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.495977 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-login\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.496033 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zn5w\" (UniqueName: \"kubernetes.io/projected/42c8738a-aad8-4cc5-b18f-92eee2745673-kube-api-access-9zn5w\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.496058 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-serving-cert\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.496081 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-trusted-ca-bundle\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.496107 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-session\") pod \"42c8738a-aad8-4cc5-b18f-92eee2745673\" (UID: \"42c8738a-aad8-4cc5-b18f-92eee2745673\") "
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.497389 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.497836 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.498254 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.498345 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.500517 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.502061 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42c8738a-aad8-4cc5-b18f-92eee2745673-kube-api-access-9zn5w" (OuterVolumeSpecName: "kube-api-access-9zn5w") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "kube-api-access-9zn5w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.502108 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.502295 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.502770 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.504176 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.504545 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.506057 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.508205 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.508800 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "42c8738a-aad8-4cc5-b18f-92eee2745673" (UID: "42c8738a-aad8-4cc5-b18f-92eee2745673"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598025 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598091 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598137 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598440 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-service-ca\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598517 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598595 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598680 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-session\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598760 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-login\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598808 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldlxr\" (UniqueName: \"kubernetes.io/projected/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-kube-api-access-ldlxr\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598856 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-audit-dir\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598894 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-router-certs\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598931 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-audit-policies\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.598974 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599046 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-error\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599144 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zn5w\" (UniqueName: \"kubernetes.io/projected/42c8738a-aad8-4cc5-b18f-92eee2745673-kube-api-access-9zn5w\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599176 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599197 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599266 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599290 4919 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-dir\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599309 4919 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-audit-policies\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599329 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599349 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599369 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599388 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599406 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599425 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599443 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.599461 4919 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/42c8738a-aad8-4cc5-b18f-92eee2745673-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701262 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-service-ca\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701327 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701373 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701443 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-session\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701512 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-login\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701575 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldlxr\" (UniqueName: \"kubernetes.io/projected/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-kube-api-access-ldlxr\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701640 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-audit-dir\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701703 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-router-certs\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701765 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-audit-policies\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701836 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701903 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-error\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.701964 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.702008 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.702060 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.703311 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-audit-dir\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.704201 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-service-ca\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.704859 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-audit-policies\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.705458 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.705895 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.707536 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.708532 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.709145 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.709664 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-router-certs\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.711284 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-login\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.711742 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-session\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.713104 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-user-template-error\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.721933 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.734006 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldlxr\" (UniqueName: \"kubernetes.io/projected/aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6-kube-api-access-ldlxr\") pod \"oauth-openshift-7557fdbcd4-62gl6\" (UID: \"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6\") " pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:41 crc kubenswrapper[4919]: I0930 20:17:41.782575 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:42 crc kubenswrapper[4919]: I0930 20:17:42.262873 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"]
Sep 30 20:17:42 crc kubenswrapper[4919]: I0930 20:17:42.295514 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls" event={"ID":"42c8738a-aad8-4cc5-b18f-92eee2745673","Type":"ContainerDied","Data":"2f5cbde3edbd851149a311baa1aa43377817ae541c558eb995685f3468fb69d0"}
Sep 30 20:17:42 crc kubenswrapper[4919]: I0930 20:17:42.296003 4919 scope.go:117] "RemoveContainer" containerID="1857921e867b9189272ed41ddf38fe806199c5609c64beb36763c520478164d1"
Sep 30 20:17:42 crc kubenswrapper[4919]: I0930 20:17:42.296577 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-s5kls"
Sep 30 20:17:42 crc kubenswrapper[4919]: I0930 20:17:42.307411 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6" event={"ID":"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6","Type":"ContainerStarted","Data":"6a7882fd300476ed7aae0a9771cfdc987ffd21254bdd136a717ce7423980d717"}
Sep 30 20:17:42 crc kubenswrapper[4919]: I0930 20:17:42.334253 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5kls"]
Sep 30 20:17:42 crc kubenswrapper[4919]: I0930 20:17:42.338315 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-s5kls"]
Sep 30 20:17:43 crc kubenswrapper[4919]: I0930 20:17:43.319824 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6" event={"ID":"aa19fe2c-fa7e-4317-9a0a-65237a1fb6d6","Type":"ContainerStarted","Data":"58aad357e906356db31d8bdc3fa36ea2aa255a8a0cc6bb0677ced2cde22a1774"}
Sep 30 20:17:43 crc kubenswrapper[4919]: I0930 20:17:43.320166 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:43 crc kubenswrapper[4919]: I0930 20:17:43.330491 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6"
Sep 30 20:17:43 crc kubenswrapper[4919]: I0930 20:17:43.357857 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7557fdbcd4-62gl6" podStartSLOduration=27.357824744 podStartE2EDuration="27.357824744s" podCreationTimestamp="2025-09-30 20:17:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:17:43.353053995 +0000 UTC m=+248.469087152" watchObservedRunningTime="2025-09-30 20:17:43.357824744 +0000 UTC m=+248.473857911"
Sep 30 20:17:43 crc kubenswrapper[4919]: I0930 20:17:43.637870 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42c8738a-aad8-4cc5-b18f-92eee2745673" path="/var/lib/kubelet/pods/42c8738a-aad8-4cc5-b18f-92eee2745673/volumes"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.648096 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7f7p6"]
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.649287 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7f7p6" podUID="283bf417-5302-4743-8a44-76fa61eba04b" containerName="registry-server" containerID="cri-o://c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa" gracePeriod=30
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.656889 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-54vv5"]
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.657106 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-54vv5" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerName="registry-server" containerID="cri-o://113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d" gracePeriod=30
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.679923 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-khpgg"]
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.680158 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" podUID="23070aa6-f355-494e-b108-a3fba285cd2c" containerName="marketplace-operator" containerID="cri-o://0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b" gracePeriod=30
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.689533 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-98qcl"]
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.689837 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-98qcl" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerName="registry-server" containerID="cri-o://d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232" gracePeriod=30
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.695280 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v29xb"]
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.696116 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.704487 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p6xv9"]
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.704780 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p6xv9" podUID="d8339c07-c7d4-4da0-8927-b99887894379" containerName="registry-server" containerID="cri-o://17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d" gracePeriod=30
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.706786 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v29xb"]
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.843360 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f439d99-db12-43ef-bf75-48e46588d67b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.843636 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q5km\" (UniqueName: \"kubernetes.io/projected/4f439d99-db12-43ef-bf75-48e46588d67b-kube-api-access-5q5km\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.843691 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4f439d99-db12-43ef-bf75-48e46588d67b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.944525 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f439d99-db12-43ef-bf75-48e46588d67b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.944587 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q5km\" (UniqueName: \"kubernetes.io/projected/4f439d99-db12-43ef-bf75-48e46588d67b-kube-api-access-5q5km\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.944628 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4f439d99-db12-43ef-bf75-48e46588d67b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.947932 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4f439d99-db12-43ef-bf75-48e46588d67b-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.950667 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4f439d99-db12-43ef-bf75-48e46588d67b-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:58 crc kubenswrapper[4919]: I0930 20:17:58.964177 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q5km\" (UniqueName: \"kubernetes.io/projected/4f439d99-db12-43ef-bf75-48e46588d67b-kube-api-access-5q5km\") pod \"marketplace-operator-79b997595-v29xb\" (UID: \"4f439d99-db12-43ef-bf75-48e46588d67b\") " pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.047727 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-v29xb"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.074854 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7f7p6"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.074967 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-54vv5"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.126009 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.136196 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.141242 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.249049 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n2x9\" (UniqueName: \"kubernetes.io/projected/82021dd4-fbb8-4832-a38d-cd00aa9d786b-kube-api-access-6n2x9\") pod \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.249407 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-utilities\") pod \"d8339c07-c7d4-4da0-8927-b99887894379\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.249442 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-trusted-ca\") pod \"23070aa6-f355-494e-b108-a3fba285cd2c\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.249458 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jlzp\" (UniqueName: \"kubernetes.io/projected/f4ca8312-ff90-418e-8503-8acc3f9d63b1-kube-api-access-4jlzp\") pod \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.249482 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d726b\" (UniqueName: \"kubernetes.io/projected/283bf417-5302-4743-8a44-76fa61eba04b-kube-api-access-d726b\") pod \"283bf417-5302-4743-8a44-76fa61eba04b\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.250161 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "23070aa6-f355-494e-b108-a3fba285cd2c" (UID: "23070aa6-f355-494e-b108-a3fba285cd2c"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.250335 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-utilities" (OuterVolumeSpecName: "utilities") pod "d8339c07-c7d4-4da0-8927-b99887894379" (UID: "d8339c07-c7d4-4da0-8927-b99887894379"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252296 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-catalog-content\") pod \"d8339c07-c7d4-4da0-8927-b99887894379\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252390 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-catalog-content\") pod \"283bf417-5302-4743-8a44-76fa61eba04b\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252441 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44xq9\" (UniqueName: \"kubernetes.io/projected/23070aa6-f355-494e-b108-a3fba285cd2c-kube-api-access-44xq9\") pod \"23070aa6-f355-494e-b108-a3fba285cd2c\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252462 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-catalog-content\") pod \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252493 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-utilities\") pod \"283bf417-5302-4743-8a44-76fa61eba04b\" (UID: \"283bf417-5302-4743-8a44-76fa61eba04b\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252514 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-utilities\") pod \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\" (UID: \"f4ca8312-ff90-418e-8503-8acc3f9d63b1\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252537 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-operator-metrics\") pod \"23070aa6-f355-494e-b108-a3fba285cd2c\" (UID: \"23070aa6-f355-494e-b108-a3fba285cd2c\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252558 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-catalog-content\") pod \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252577 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfv7k\" (UniqueName: \"kubernetes.io/projected/d8339c07-c7d4-4da0-8927-b99887894379-kube-api-access-qfv7k\") pod \"d8339c07-c7d4-4da0-8927-b99887894379\" (UID: \"d8339c07-c7d4-4da0-8927-b99887894379\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.252605 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-utilities\") pod \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\" (UID: \"82021dd4-fbb8-4832-a38d-cd00aa9d786b\") "
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.253002 4919 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.253018 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.253309 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-utilities" (OuterVolumeSpecName: "utilities") pod "f4ca8312-ff90-418e-8503-8acc3f9d63b1" (UID: "f4ca8312-ff90-418e-8503-8acc3f9d63b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.254143 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-utilities" (OuterVolumeSpecName: "utilities") pod "283bf417-5302-4743-8a44-76fa61eba04b" (UID: "283bf417-5302-4743-8a44-76fa61eba04b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.254736 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/283bf417-5302-4743-8a44-76fa61eba04b-kube-api-access-d726b" (OuterVolumeSpecName: "kube-api-access-d726b") pod "283bf417-5302-4743-8a44-76fa61eba04b" (UID: "283bf417-5302-4743-8a44-76fa61eba04b"). InnerVolumeSpecName "kube-api-access-d726b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.255135 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23070aa6-f355-494e-b108-a3fba285cd2c-kube-api-access-44xq9" (OuterVolumeSpecName: "kube-api-access-44xq9") pod "23070aa6-f355-494e-b108-a3fba285cd2c" (UID: "23070aa6-f355-494e-b108-a3fba285cd2c"). InnerVolumeSpecName "kube-api-access-44xq9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.255282 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-utilities" (OuterVolumeSpecName: "utilities") pod "82021dd4-fbb8-4832-a38d-cd00aa9d786b" (UID: "82021dd4-fbb8-4832-a38d-cd00aa9d786b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.256790 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8339c07-c7d4-4da0-8927-b99887894379-kube-api-access-qfv7k" (OuterVolumeSpecName: "kube-api-access-qfv7k") pod "d8339c07-c7d4-4da0-8927-b99887894379" (UID: "d8339c07-c7d4-4da0-8927-b99887894379"). InnerVolumeSpecName "kube-api-access-qfv7k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.257405 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "23070aa6-f355-494e-b108-a3fba285cd2c" (UID: "23070aa6-f355-494e-b108-a3fba285cd2c"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.263930 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4ca8312-ff90-418e-8503-8acc3f9d63b1-kube-api-access-4jlzp" (OuterVolumeSpecName: "kube-api-access-4jlzp") pod "f4ca8312-ff90-418e-8503-8acc3f9d63b1" (UID: "f4ca8312-ff90-418e-8503-8acc3f9d63b1"). InnerVolumeSpecName "kube-api-access-4jlzp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.263898 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82021dd4-fbb8-4832-a38d-cd00aa9d786b-kube-api-access-6n2x9" (OuterVolumeSpecName: "kube-api-access-6n2x9") pod "82021dd4-fbb8-4832-a38d-cd00aa9d786b" (UID: "82021dd4-fbb8-4832-a38d-cd00aa9d786b"). InnerVolumeSpecName "kube-api-access-6n2x9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.277321 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "82021dd4-fbb8-4832-a38d-cd00aa9d786b" (UID: "82021dd4-fbb8-4832-a38d-cd00aa9d786b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.318607 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4ca8312-ff90-418e-8503-8acc3f9d63b1" (UID: "f4ca8312-ff90-418e-8503-8acc3f9d63b1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.323617 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "283bf417-5302-4743-8a44-76fa61eba04b" (UID: "283bf417-5302-4743-8a44-76fa61eba04b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354457 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44xq9\" (UniqueName: \"kubernetes.io/projected/23070aa6-f355-494e-b108-a3fba285cd2c-kube-api-access-44xq9\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354493 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354504 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354520 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4ca8312-ff90-418e-8503-8acc3f9d63b1-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354532 4919 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/23070aa6-f355-494e-b108-a3fba285cd2c-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354544 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354554 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfv7k\" (UniqueName: \"kubernetes.io/projected/d8339c07-c7d4-4da0-8927-b99887894379-kube-api-access-qfv7k\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354567 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82021dd4-fbb8-4832-a38d-cd00aa9d786b-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354577 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n2x9\" (UniqueName: \"kubernetes.io/projected/82021dd4-fbb8-4832-a38d-cd00aa9d786b-kube-api-access-6n2x9\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354588 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jlzp\" (UniqueName: \"kubernetes.io/projected/f4ca8312-ff90-418e-8503-8acc3f9d63b1-kube-api-access-4jlzp\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354601 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d726b\" (UniqueName: \"kubernetes.io/projected/283bf417-5302-4743-8a44-76fa61eba04b-kube-api-access-d726b\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.354612 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/283bf417-5302-4743-8a44-76fa61eba04b-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.355149 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8339c07-c7d4-4da0-8927-b99887894379" (UID: "d8339c07-c7d4-4da0-8927-b99887894379"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.413435 4919 generic.go:334] "Generic (PLEG): container finished" podID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerID="113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d" exitCode=0
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.413487 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-54vv5"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.413522 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54vv5" event={"ID":"f4ca8312-ff90-418e-8503-8acc3f9d63b1","Type":"ContainerDied","Data":"113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.413557 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-54vv5" event={"ID":"f4ca8312-ff90-418e-8503-8acc3f9d63b1","Type":"ContainerDied","Data":"a98613a90b7ee027ea7dd461259eee3495b6c340c831a001850c89ad55564fda"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.413574 4919 scope.go:117] "RemoveContainer" containerID="113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.416552 4919 generic.go:334] "Generic (PLEG): container finished" podID="23070aa6-f355-494e-b108-a3fba285cd2c" containerID="0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b" exitCode=0
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.416604 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.416635 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" event={"ID":"23070aa6-f355-494e-b108-a3fba285cd2c","Type":"ContainerDied","Data":"0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.416679 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-khpgg" event={"ID":"23070aa6-f355-494e-b108-a3fba285cd2c","Type":"ContainerDied","Data":"f96f605ae859fffe77f0ccc6e6787634a65f26b077ffa253d985401f76527700"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.420830 4919 generic.go:334] "Generic (PLEG): container finished" podID="d8339c07-c7d4-4da0-8927-b99887894379" containerID="17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d" exitCode=0
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.420934 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6xv9" event={"ID":"d8339c07-c7d4-4da0-8927-b99887894379","Type":"ContainerDied","Data":"17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.420976 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p6xv9" event={"ID":"d8339c07-c7d4-4da0-8927-b99887894379","Type":"ContainerDied","Data":"590fbfd1f67fc384fee26f774d8f8dd1afe8c8416c3069af2a563973e92cd36f"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.421068 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p6xv9"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.433517 4919 generic.go:334] "Generic (PLEG): container finished" podID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerID="d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232" exitCode=0
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.433668 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-98qcl"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.433810 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98qcl" event={"ID":"82021dd4-fbb8-4832-a38d-cd00aa9d786b","Type":"ContainerDied","Data":"d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.433906 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-98qcl" event={"ID":"82021dd4-fbb8-4832-a38d-cd00aa9d786b","Type":"ContainerDied","Data":"26bfd5c31d9d68d0bc4d381d6d1f0732f4d78a4cc8e9f51621dd96787e61dc95"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.440239 4919 scope.go:117] "RemoveContainer" containerID="249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.449337 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-54vv5"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.450094 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7f7p6"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.450115 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f7p6" event={"ID":"283bf417-5302-4743-8a44-76fa61eba04b","Type":"ContainerDied","Data":"c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.451463 4919 generic.go:334] "Generic (PLEG): container finished" podID="283bf417-5302-4743-8a44-76fa61eba04b" containerID="c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa" exitCode=0
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.451847 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7f7p6" event={"ID":"283bf417-5302-4743-8a44-76fa61eba04b","Type":"ContainerDied","Data":"c41f811134543cf3b24f6bfc238bc8d2c37dfe3fd54b513f178dedb3971134eb"}
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.455042 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8339c07-c7d4-4da0-8927-b99887894379-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.455947 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-54vv5"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.475977 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-khpgg"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.478222 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-khpgg"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.485098 4919 scope.go:117] "RemoveContainer" containerID="c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.488104 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-98qcl"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.498439 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-98qcl"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.502649 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p6xv9"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.508637 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p6xv9"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.512733 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7f7p6"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.517850 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7f7p6"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.521583 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-v29xb"]
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.525708 4919 scope.go:117] "RemoveContainer" containerID="113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d"
Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.526172 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d\": container with ID starting with 113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d not found: ID does not exist" containerID="113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.526226 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d"} err="failed to get container status \"113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d\": rpc error: code = NotFound desc = could not find container \"113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d\": container with ID starting with 113e8588aa3b8198f3fb59116289163864403146712deda724dd8cd0b7713c6d not found: ID does not exist"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.526255 4919 scope.go:117] "RemoveContainer" containerID="249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1"
Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.526520 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1\": container with ID starting with 249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1 not found: ID does not exist" containerID="249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.526539 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1"} err="failed to get container status \"249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1\": rpc error: code = NotFound desc = could not find container \"249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1\": container with ID starting with 249153e49c3443c86484419db7ff817559d9236d721a2e2ad2f6e307029230e1 not found: ID does not exist"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.526554 4919 scope.go:117] "RemoveContainer" containerID="c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9"
Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.526794 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9\": container with ID starting with c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9 not found: ID does not exist" containerID="c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.526845 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9"} err="failed to get container status \"c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9\": rpc error: code = NotFound desc = could not find container \"c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9\": container with ID starting with c7586d6a23026556ee67456961b18d026df8f9bb465e425f2e2eecf7d18260d9 not found: ID does not exist"
Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.526864 4919 scope.go:117] "RemoveContainer" containerID="0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b"
Sep 30 20:17:59 crc
kubenswrapper[4919]: I0930 20:17:59.542780 4919 scope.go:117] "RemoveContainer" containerID="0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.543663 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b\": container with ID starting with 0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b not found: ID does not exist" containerID="0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.543704 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b"} err="failed to get container status \"0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b\": rpc error: code = NotFound desc = could not find container \"0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b\": container with ID starting with 0acb956483481376d88d5dea1576619db6bdc596d9ca926a31b0f98a081d393b not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.543774 4919 scope.go:117] "RemoveContainer" containerID="17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.560579 4919 scope.go:117] "RemoveContainer" containerID="aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.575949 4919 scope.go:117] "RemoveContainer" containerID="214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.611818 4919 scope.go:117] "RemoveContainer" containerID="17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.612187 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d\": container with ID starting with 17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d not found: ID does not exist" containerID="17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.612260 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d"} err="failed to get container status \"17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d\": rpc error: code = NotFound desc = could not find container \"17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d\": container with ID starting with 17d3fa13074af8ce53de605424e415516385367eff6d51571df3fcc45de3205d not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.612297 4919 scope.go:117] "RemoveContainer" containerID="aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.612727 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1\": container with ID starting with aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1 not found: ID does not 
exist" containerID="aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.612757 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1"} err="failed to get container status \"aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1\": rpc error: code = NotFound desc = could not find container \"aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1\": container with ID starting with aed8615f1a95c7659fc3e9408946272aa46a8f9578934cf3b761828c50c490c1 not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.612778 4919 scope.go:117] "RemoveContainer" containerID="214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.613084 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812\": container with ID starting with 214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812 not found: ID does not exist" containerID="214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.613130 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812"} err="failed to get container status \"214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812\": rpc error: code = NotFound desc = could not find container \"214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812\": container with ID starting with 214c24bc401681dfa5a8eafc7f57325e5da0b8a12b74a576376de0f7cb9f2812 not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.613177 4919 scope.go:117] "RemoveContainer" containerID="d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.626619 4919 scope.go:117] "RemoveContainer" containerID="3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.638565 4919 scope.go:117] "RemoveContainer" containerID="d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.639318 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23070aa6-f355-494e-b108-a3fba285cd2c" path="/var/lib/kubelet/pods/23070aa6-f355-494e-b108-a3fba285cd2c/volumes" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.640177 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="283bf417-5302-4743-8a44-76fa61eba04b" path="/var/lib/kubelet/pods/283bf417-5302-4743-8a44-76fa61eba04b/volumes" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.641032 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" path="/var/lib/kubelet/pods/82021dd4-fbb8-4832-a38d-cd00aa9d786b/volumes" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.642627 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8339c07-c7d4-4da0-8927-b99887894379" path="/var/lib/kubelet/pods/d8339c07-c7d4-4da0-8927-b99887894379/volumes" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.643429 4919 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" path="/var/lib/kubelet/pods/f4ca8312-ff90-418e-8503-8acc3f9d63b1/volumes" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.653877 4919 scope.go:117] "RemoveContainer" containerID="d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.655951 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232\": container with ID starting with d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232 not found: ID does not exist" containerID="d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.656013 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232"} err="failed to get container status \"d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232\": rpc error: code = NotFound desc = could not find container \"d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232\": container with ID starting with d6a2798779af787cdd37861777c269a0241c2c7f15dd712e76412e0887d1d232 not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.656059 4919 scope.go:117] "RemoveContainer" containerID="3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.656540 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad\": container with ID starting with 3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad not found: ID does not exist" containerID="3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.656585 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad"} err="failed to get container status \"3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad\": rpc error: code = NotFound desc = could not find container \"3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad\": container with ID starting with 3909b2e54f8a36162f2dead8f406ebb07a32e758fdf84a1c87c54bacb8ada6ad not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.656616 4919 scope.go:117] "RemoveContainer" containerID="d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.656991 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c\": container with ID starting with d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c not found: ID does not exist" containerID="d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.657025 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c"} err="failed to get container status 
\"d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c\": rpc error: code = NotFound desc = could not find container \"d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c\": container with ID starting with d788dc51b20b4d24286faa632c311de9b067999daf30fb3deb69b49e8d59be4c not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.657066 4919 scope.go:117] "RemoveContainer" containerID="c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.670962 4919 scope.go:117] "RemoveContainer" containerID="22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.686764 4919 scope.go:117] "RemoveContainer" containerID="6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.700477 4919 scope.go:117] "RemoveContainer" containerID="c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.701017 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa\": container with ID starting with c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa not found: ID does not exist" containerID="c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.701052 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa"} err="failed to get container status \"c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa\": rpc error: code = NotFound desc = could not find container \"c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa\": container with ID starting with c8297343be8edf17dabdc3ef2b94676bd4306dc0f4f8f47972b3b96242d33daa not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.701082 4919 scope.go:117] "RemoveContainer" containerID="22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa" Sep 30 20:17:59 crc kubenswrapper[4919]: E0930 20:17:59.701520 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa\": container with ID starting with 22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa not found: ID does not exist" containerID="22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.701542 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa"} err="failed to get container status \"22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa\": rpc error: code = NotFound desc = could not find container \"22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa\": container with ID starting with 22c7e5d19ae099b4cd141324cedcc19b950625ddfe50767baa8a7d1389810baa not found: ID does not exist" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.701555 4919 scope.go:117] "RemoveContainer" containerID="6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf" Sep 30 20:17:59 crc 
kubenswrapper[4919]: E0930 20:17:59.701933 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf\": container with ID starting with 6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf not found: ID does not exist" containerID="6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf" Sep 30 20:17:59 crc kubenswrapper[4919]: I0930 20:17:59.701964 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf"} err="failed to get container status \"6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf\": rpc error: code = NotFound desc = could not find container \"6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf\": container with ID starting with 6056abfe9cbd192ca6743df484aaafc5ef89e4d90b8426c4cd252486da3c99bf not found: ID does not exist" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.463158 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-v29xb" event={"ID":"4f439d99-db12-43ef-bf75-48e46588d67b","Type":"ContainerStarted","Data":"d1d98efe882735e97000b857979fd2b5e24b57ad6caaf322829ca5a577fcaac9"} Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.463566 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-v29xb" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.463590 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-v29xb" event={"ID":"4f439d99-db12-43ef-bf75-48e46588d67b","Type":"ContainerStarted","Data":"feec4ff5a9d4ae73375c689a9623fa88d9623289f908a6776c0c4828084c83c7"} Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.469126 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-v29xb" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.480612 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-v29xb" podStartSLOduration=2.480589648 podStartE2EDuration="2.480589648s" podCreationTimestamp="2025-09-30 20:17:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:18:00.476957569 +0000 UTC m=+265.592990726" watchObservedRunningTime="2025-09-30 20:18:00.480589648 +0000 UTC m=+265.596622805" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870111 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4gtrb"] Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870719 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerName="extract-content" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870734 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerName="extract-content" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870749 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870755 4919 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870767 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="283bf417-5302-4743-8a44-76fa61eba04b" containerName="extract-content" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870775 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="283bf417-5302-4743-8a44-76fa61eba04b" containerName="extract-content" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870789 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8339c07-c7d4-4da0-8927-b99887894379" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870795 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8339c07-c7d4-4da0-8927-b99887894379" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870813 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8339c07-c7d4-4da0-8927-b99887894379" containerName="extract-content" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870820 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8339c07-c7d4-4da0-8927-b99887894379" containerName="extract-content" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870829 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerName="extract-utilities" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870834 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerName="extract-utilities" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870851 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerName="extract-utilities" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870857 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerName="extract-utilities" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870869 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870876 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870889 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerName="extract-content" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870894 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerName="extract-content" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870907 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23070aa6-f355-494e-b108-a3fba285cd2c" containerName="marketplace-operator" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870916 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="23070aa6-f355-494e-b108-a3fba285cd2c" containerName="marketplace-operator" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870926 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="283bf417-5302-4743-8a44-76fa61eba04b" containerName="extract-utilities" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 
20:18:00.870959 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="283bf417-5302-4743-8a44-76fa61eba04b" containerName="extract-utilities" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870974 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="283bf417-5302-4743-8a44-76fa61eba04b" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.870983 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="283bf417-5302-4743-8a44-76fa61eba04b" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: E0930 20:18:00.870997 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8339c07-c7d4-4da0-8927-b99887894379" containerName="extract-utilities" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.871006 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8339c07-c7d4-4da0-8927-b99887894379" containerName="extract-utilities" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.871430 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="283bf417-5302-4743-8a44-76fa61eba04b" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.871450 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="23070aa6-f355-494e-b108-a3fba285cd2c" containerName="marketplace-operator" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.871464 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8339c07-c7d4-4da0-8927-b99887894379" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.871478 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="82021dd4-fbb8-4832-a38d-cd00aa9d786b" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.871488 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4ca8312-ff90-418e-8503-8acc3f9d63b1" containerName="registry-server" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.873644 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.880340 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 30 20:18:00 crc kubenswrapper[4919]: I0930 20:18:00.888348 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4gtrb"] Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.066339 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pbrx9"] Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.067249 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.069602 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.072437 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pbrx9"] Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.073141 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-utilities\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.073200 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wmkf\" (UniqueName: \"kubernetes.io/projected/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-kube-api-access-4wmkf\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.073283 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dcc4f23-6453-44f6-943a-0a79f2f6e224-catalog-content\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.073362 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dcc4f23-6453-44f6-943a-0a79f2f6e224-utilities\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.073394 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-catalog-content\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.073413 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g28fl\" (UniqueName: \"kubernetes.io/projected/1dcc4f23-6453-44f6-943a-0a79f2f6e224-kube-api-access-g28fl\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.174905 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dcc4f23-6453-44f6-943a-0a79f2f6e224-utilities\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.174978 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-catalog-content\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.175010 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g28fl\" (UniqueName: \"kubernetes.io/projected/1dcc4f23-6453-44f6-943a-0a79f2f6e224-kube-api-access-g28fl\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.175080 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-utilities\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.175120 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wmkf\" (UniqueName: \"kubernetes.io/projected/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-kube-api-access-4wmkf\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.175154 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dcc4f23-6453-44f6-943a-0a79f2f6e224-catalog-content\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.175483 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1dcc4f23-6453-44f6-943a-0a79f2f6e224-utilities\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.175609 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1dcc4f23-6453-44f6-943a-0a79f2f6e224-catalog-content\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.175616 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-catalog-content\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.175711 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-utilities\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.198659 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wmkf\" (UniqueName: 
\"kubernetes.io/projected/2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50-kube-api-access-4wmkf\") pod \"redhat-marketplace-pbrx9\" (UID: \"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50\") " pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.200146 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g28fl\" (UniqueName: \"kubernetes.io/projected/1dcc4f23-6453-44f6-943a-0a79f2f6e224-kube-api-access-g28fl\") pod \"certified-operators-4gtrb\" (UID: \"1dcc4f23-6453-44f6-943a-0a79f2f6e224\") " pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.240180 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.390803 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.578478 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pbrx9"] Sep 30 20:18:01 crc kubenswrapper[4919]: W0930 20:18:01.588191 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ce51e11_6e4a_4ffa_bf10_f8b8b50fda50.slice/crio-0715be46331541768538c43559b066223f400f53cd6160368d985613fe63283a WatchSource:0}: Error finding container 0715be46331541768538c43559b066223f400f53cd6160368d985613fe63283a: Status 404 returned error can't find the container with id 0715be46331541768538c43559b066223f400f53cd6160368d985613fe63283a Sep 30 20:18:01 crc kubenswrapper[4919]: I0930 20:18:01.659397 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4gtrb"] Sep 30 20:18:01 crc kubenswrapper[4919]: W0930 20:18:01.664955 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1dcc4f23_6453_44f6_943a_0a79f2f6e224.slice/crio-5f1b88117aa90e37994117e7bd6323008175669212cc2607b08de17873584452 WatchSource:0}: Error finding container 5f1b88117aa90e37994117e7bd6323008175669212cc2607b08de17873584452: Status 404 returned error can't find the container with id 5f1b88117aa90e37994117e7bd6323008175669212cc2607b08de17873584452 Sep 30 20:18:02 crc kubenswrapper[4919]: I0930 20:18:02.476014 4919 generic.go:334] "Generic (PLEG): container finished" podID="1dcc4f23-6453-44f6-943a-0a79f2f6e224" containerID="fa6b76fc6a75c006d0bd69e9ec52049e943278419fb2f62abc2d8cb14401b50d" exitCode=0 Sep 30 20:18:02 crc kubenswrapper[4919]: I0930 20:18:02.476084 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4gtrb" event={"ID":"1dcc4f23-6453-44f6-943a-0a79f2f6e224","Type":"ContainerDied","Data":"fa6b76fc6a75c006d0bd69e9ec52049e943278419fb2f62abc2d8cb14401b50d"} Sep 30 20:18:02 crc kubenswrapper[4919]: I0930 20:18:02.476111 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4gtrb" event={"ID":"1dcc4f23-6453-44f6-943a-0a79f2f6e224","Type":"ContainerStarted","Data":"5f1b88117aa90e37994117e7bd6323008175669212cc2607b08de17873584452"} Sep 30 20:18:02 crc kubenswrapper[4919]: I0930 20:18:02.478065 4919 generic.go:334] "Generic (PLEG): container finished" podID="2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50" 
containerID="840e86afc2a01c3d5f9fbc633826aef31361336fa82c7c653d16a55c5d30fe03" exitCode=0 Sep 30 20:18:02 crc kubenswrapper[4919]: I0930 20:18:02.478248 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbrx9" event={"ID":"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50","Type":"ContainerDied","Data":"840e86afc2a01c3d5f9fbc633826aef31361336fa82c7c653d16a55c5d30fe03"} Sep 30 20:18:02 crc kubenswrapper[4919]: I0930 20:18:02.478302 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbrx9" event={"ID":"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50","Type":"ContainerStarted","Data":"0715be46331541768538c43559b066223f400f53cd6160368d985613fe63283a"} Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.264764 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f2vvm"] Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.266069 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.268764 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.279660 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f2vvm"] Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.406472 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4ckf\" (UniqueName: \"kubernetes.io/projected/f8b254d9-2040-4662-8949-eeeec8786ac3-kube-api-access-m4ckf\") pod \"redhat-operators-f2vvm\" (UID: \"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.406915 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8b254d9-2040-4662-8949-eeeec8786ac3-utilities\") pod \"redhat-operators-f2vvm\" (UID: \"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.406946 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8b254d9-2040-4662-8949-eeeec8786ac3-catalog-content\") pod \"redhat-operators-f2vvm\" (UID: \"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.468647 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-76q7q"] Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.469789 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.471553 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.476480 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-76q7q"] Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.507597 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4ckf\" (UniqueName: \"kubernetes.io/projected/f8b254d9-2040-4662-8949-eeeec8786ac3-kube-api-access-m4ckf\") pod \"redhat-operators-f2vvm\" (UID: \"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.507641 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p884c\" (UniqueName: \"kubernetes.io/projected/64dc7de7-32f2-49ae-9719-c347dd0f340a-kube-api-access-p884c\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.507687 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8b254d9-2040-4662-8949-eeeec8786ac3-utilities\") pod \"redhat-operators-f2vvm\" (UID: \"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.507705 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64dc7de7-32f2-49ae-9719-c347dd0f340a-utilities\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.507785 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8b254d9-2040-4662-8949-eeeec8786ac3-catalog-content\") pod \"redhat-operators-f2vvm\" (UID: \"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.507841 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64dc7de7-32f2-49ae-9719-c347dd0f340a-catalog-content\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.508094 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f8b254d9-2040-4662-8949-eeeec8786ac3-utilities\") pod \"redhat-operators-f2vvm\" (UID: \"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.508241 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f8b254d9-2040-4662-8949-eeeec8786ac3-catalog-content\") pod \"redhat-operators-f2vvm\" (UID: 
\"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.530468 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4ckf\" (UniqueName: \"kubernetes.io/projected/f8b254d9-2040-4662-8949-eeeec8786ac3-kube-api-access-m4ckf\") pod \"redhat-operators-f2vvm\" (UID: \"f8b254d9-2040-4662-8949-eeeec8786ac3\") " pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.608865 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64dc7de7-32f2-49ae-9719-c347dd0f340a-utilities\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.609285 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64dc7de7-32f2-49ae-9719-c347dd0f340a-utilities\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.608905 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64dc7de7-32f2-49ae-9719-c347dd0f340a-catalog-content\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.609397 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p884c\" (UniqueName: \"kubernetes.io/projected/64dc7de7-32f2-49ae-9719-c347dd0f340a-kube-api-access-p884c\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.609582 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64dc7de7-32f2-49ae-9719-c347dd0f340a-catalog-content\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.623293 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.632874 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p884c\" (UniqueName: \"kubernetes.io/projected/64dc7de7-32f2-49ae-9719-c347dd0f340a-kube-api-access-p884c\") pod \"community-operators-76q7q\" (UID: \"64dc7de7-32f2-49ae-9719-c347dd0f340a\") " pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.808538 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f2vvm"] Sep 30 20:18:03 crc kubenswrapper[4919]: I0930 20:18:03.878053 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.038089 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-76q7q"] Sep 30 20:18:04 crc kubenswrapper[4919]: W0930 20:18:04.051520 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64dc7de7_32f2_49ae_9719_c347dd0f340a.slice/crio-6e4a224c836b78dcdca7708885a696494510592f0951f16240773dc09fd6f458 WatchSource:0}: Error finding container 6e4a224c836b78dcdca7708885a696494510592f0951f16240773dc09fd6f458: Status 404 returned error can't find the container with id 6e4a224c836b78dcdca7708885a696494510592f0951f16240773dc09fd6f458 Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.496490 4919 generic.go:334] "Generic (PLEG): container finished" podID="2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50" containerID="cdb899a7979685bd553367b8e0cb3f617b4598dffc26777a1537225f591d8237" exitCode=0 Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.496598 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbrx9" event={"ID":"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50","Type":"ContainerDied","Data":"cdb899a7979685bd553367b8e0cb3f617b4598dffc26777a1537225f591d8237"} Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.501960 4919 generic.go:334] "Generic (PLEG): container finished" podID="64dc7de7-32f2-49ae-9719-c347dd0f340a" containerID="14c7e15da49ea5c16bc45147cd60d1ab318eeca63f76172af350e9423f4b3106" exitCode=0 Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.502468 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76q7q" event={"ID":"64dc7de7-32f2-49ae-9719-c347dd0f340a","Type":"ContainerDied","Data":"14c7e15da49ea5c16bc45147cd60d1ab318eeca63f76172af350e9423f4b3106"} Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.502510 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76q7q" event={"ID":"64dc7de7-32f2-49ae-9719-c347dd0f340a","Type":"ContainerStarted","Data":"6e4a224c836b78dcdca7708885a696494510592f0951f16240773dc09fd6f458"} Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.510355 4919 generic.go:334] "Generic (PLEG): container finished" podID="f8b254d9-2040-4662-8949-eeeec8786ac3" containerID="9d9f7ac51c22b4af3f112a98ca94fac05cee02242335a5db086580c250b9afb9" exitCode=0 Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.510654 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2vvm" event={"ID":"f8b254d9-2040-4662-8949-eeeec8786ac3","Type":"ContainerDied","Data":"9d9f7ac51c22b4af3f112a98ca94fac05cee02242335a5db086580c250b9afb9"} Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.510744 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2vvm" event={"ID":"f8b254d9-2040-4662-8949-eeeec8786ac3","Type":"ContainerStarted","Data":"eb0e1012108b21aa5cd6199a9a33da47e33a2fa6ecbb849c3a0d48f8e7bb065e"} Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.526270 4919 generic.go:334] "Generic (PLEG): container finished" podID="1dcc4f23-6453-44f6-943a-0a79f2f6e224" containerID="d50b065680fb23cd6acac58925f3af92c30f636efc23685f39eda6f04f083f59" exitCode=0 Sep 30 20:18:04 crc kubenswrapper[4919]: I0930 20:18:04.526320 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-4gtrb" event={"ID":"1dcc4f23-6453-44f6-943a-0a79f2f6e224","Type":"ContainerDied","Data":"d50b065680fb23cd6acac58925f3af92c30f636efc23685f39eda6f04f083f59"} Sep 30 20:18:05 crc kubenswrapper[4919]: I0930 20:18:05.536176 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76q7q" event={"ID":"64dc7de7-32f2-49ae-9719-c347dd0f340a","Type":"ContainerStarted","Data":"7a35f778aa4bcc05035bb54c87020b13677342f8b180a9faaa7143ffc66f7476"} Sep 30 20:18:05 crc kubenswrapper[4919]: I0930 20:18:05.538395 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2vvm" event={"ID":"f8b254d9-2040-4662-8949-eeeec8786ac3","Type":"ContainerStarted","Data":"ccdcbc222963a1227af72ed3ddf25987295286d6fb87c4a54c12ade68e9ac0c5"} Sep 30 20:18:05 crc kubenswrapper[4919]: I0930 20:18:05.540797 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4gtrb" event={"ID":"1dcc4f23-6453-44f6-943a-0a79f2f6e224","Type":"ContainerStarted","Data":"7d28b91ef45453e1c4d276bac843e57be00a523ed43a6777f57d20b7b911ec46"} Sep 30 20:18:05 crc kubenswrapper[4919]: I0930 20:18:05.543873 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbrx9" event={"ID":"2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50","Type":"ContainerStarted","Data":"b00d27f0490bb9d8629b1150c3cffba3c69e5c17588aebe9e5ee3dd1dcd99245"} Sep 30 20:18:05 crc kubenswrapper[4919]: I0930 20:18:05.592418 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4gtrb" podStartSLOduration=2.8701306779999998 podStartE2EDuration="5.592397406s" podCreationTimestamp="2025-09-30 20:18:00 +0000 UTC" firstStartedPulling="2025-09-30 20:18:02.477872589 +0000 UTC m=+267.593905736" lastFinishedPulling="2025-09-30 20:18:05.200139337 +0000 UTC m=+270.316172464" observedRunningTime="2025-09-30 20:18:05.591384365 +0000 UTC m=+270.707417492" watchObservedRunningTime="2025-09-30 20:18:05.592397406 +0000 UTC m=+270.708430533" Sep 30 20:18:06 crc kubenswrapper[4919]: I0930 20:18:06.550455 4919 generic.go:334] "Generic (PLEG): container finished" podID="f8b254d9-2040-4662-8949-eeeec8786ac3" containerID="ccdcbc222963a1227af72ed3ddf25987295286d6fb87c4a54c12ade68e9ac0c5" exitCode=0 Sep 30 20:18:06 crc kubenswrapper[4919]: I0930 20:18:06.550513 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2vvm" event={"ID":"f8b254d9-2040-4662-8949-eeeec8786ac3","Type":"ContainerDied","Data":"ccdcbc222963a1227af72ed3ddf25987295286d6fb87c4a54c12ade68e9ac0c5"} Sep 30 20:18:06 crc kubenswrapper[4919]: I0930 20:18:06.554058 4919 generic.go:334] "Generic (PLEG): container finished" podID="64dc7de7-32f2-49ae-9719-c347dd0f340a" containerID="7a35f778aa4bcc05035bb54c87020b13677342f8b180a9faaa7143ffc66f7476" exitCode=0 Sep 30 20:18:06 crc kubenswrapper[4919]: I0930 20:18:06.554128 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76q7q" event={"ID":"64dc7de7-32f2-49ae-9719-c347dd0f340a","Type":"ContainerDied","Data":"7a35f778aa4bcc05035bb54c87020b13677342f8b180a9faaa7143ffc66f7476"} Sep 30 20:18:06 crc kubenswrapper[4919]: I0930 20:18:06.567904 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pbrx9" podStartSLOduration=3.024086172 podStartE2EDuration="5.56788151s" 
podCreationTimestamp="2025-09-30 20:18:01 +0000 UTC" firstStartedPulling="2025-09-30 20:18:02.479606872 +0000 UTC m=+267.595640019" lastFinishedPulling="2025-09-30 20:18:05.02340223 +0000 UTC m=+270.139435357" observedRunningTime="2025-09-30 20:18:05.614804584 +0000 UTC m=+270.730837711" watchObservedRunningTime="2025-09-30 20:18:06.56788151 +0000 UTC m=+271.683914637" Sep 30 20:18:07 crc kubenswrapper[4919]: I0930 20:18:07.562134 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-76q7q" event={"ID":"64dc7de7-32f2-49ae-9719-c347dd0f340a","Type":"ContainerStarted","Data":"121bbd3e05b50fdce1d65d0b652f909e42db1d204b188cd376f4bfa46390ae9d"} Sep 30 20:18:07 crc kubenswrapper[4919]: I0930 20:18:07.564445 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2vvm" event={"ID":"f8b254d9-2040-4662-8949-eeeec8786ac3","Type":"ContainerStarted","Data":"63762ed8c6d6f42b66d19aff2d9b1d5374a6fbc36e8866706ea24ab834a79124"} Sep 30 20:18:07 crc kubenswrapper[4919]: I0930 20:18:07.581000 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-76q7q" podStartSLOduration=2.109699571 podStartE2EDuration="4.580983114s" podCreationTimestamp="2025-09-30 20:18:03 +0000 UTC" firstStartedPulling="2025-09-30 20:18:04.511631925 +0000 UTC m=+269.627665062" lastFinishedPulling="2025-09-30 20:18:06.982915468 +0000 UTC m=+272.098948605" observedRunningTime="2025-09-30 20:18:07.576414926 +0000 UTC m=+272.692448053" watchObservedRunningTime="2025-09-30 20:18:07.580983114 +0000 UTC m=+272.697016241" Sep 30 20:18:07 crc kubenswrapper[4919]: I0930 20:18:07.600058 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f2vvm" podStartSLOduration=1.890793828 podStartE2EDuration="4.600040411s" podCreationTimestamp="2025-09-30 20:18:03 +0000 UTC" firstStartedPulling="2025-09-30 20:18:04.51477376 +0000 UTC m=+269.630806897" lastFinishedPulling="2025-09-30 20:18:07.224020353 +0000 UTC m=+272.340053480" observedRunningTime="2025-09-30 20:18:07.597578476 +0000 UTC m=+272.713611603" watchObservedRunningTime="2025-09-30 20:18:07.600040411 +0000 UTC m=+272.716073538" Sep 30 20:18:11 crc kubenswrapper[4919]: I0930 20:18:11.240471 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:11 crc kubenswrapper[4919]: I0930 20:18:11.241173 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:11 crc kubenswrapper[4919]: I0930 20:18:11.306887 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:11 crc kubenswrapper[4919]: I0930 20:18:11.391367 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:11 crc kubenswrapper[4919]: I0930 20:18:11.391425 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:11 crc kubenswrapper[4919]: I0930 20:18:11.424740 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:11 crc kubenswrapper[4919]: I0930 20:18:11.638555 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-4gtrb" Sep 30 20:18:11 crc kubenswrapper[4919]: I0930 20:18:11.638599 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pbrx9" Sep 30 20:18:13 crc kubenswrapper[4919]: I0930 20:18:13.624355 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:13 crc kubenswrapper[4919]: I0930 20:18:13.624405 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:13 crc kubenswrapper[4919]: I0930 20:18:13.671415 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:13 crc kubenswrapper[4919]: I0930 20:18:13.878384 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:13 crc kubenswrapper[4919]: I0930 20:18:13.878768 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:13 crc kubenswrapper[4919]: I0930 20:18:13.927420 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:18:14 crc kubenswrapper[4919]: I0930 20:18:14.655989 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f2vvm" Sep 30 20:18:14 crc kubenswrapper[4919]: I0930 20:18:14.660781 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-76q7q" Sep 30 20:19:26 crc kubenswrapper[4919]: I0930 20:19:26.061964 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:19:26 crc kubenswrapper[4919]: I0930 20:19:26.062595 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:19:56 crc kubenswrapper[4919]: I0930 20:19:56.063558 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:19:56 crc kubenswrapper[4919]: I0930 20:19:56.064833 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:20:03 crc kubenswrapper[4919]: I0930 20:20:03.950188 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r297c"] Sep 30 20:20:03 crc kubenswrapper[4919]: I0930 20:20:03.952206 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:03 crc kubenswrapper[4919]: I0930 20:20:03.970886 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r297c"] Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.113077 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.113126 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/006f3c7d-5859-4373-97e0-f41fa4317cc3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.113162 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/006f3c7d-5859-4373-97e0-f41fa4317cc3-trusted-ca\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.113181 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-bound-sa-token\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.113292 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/006f3c7d-5859-4373-97e0-f41fa4317cc3-registry-certificates\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.113318 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/006f3c7d-5859-4373-97e0-f41fa4317cc3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.113349 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-registry-tls\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.113365 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52bmq\" (UniqueName: 
\"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-kube-api-access-52bmq\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.134172 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.214927 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/006f3c7d-5859-4373-97e0-f41fa4317cc3-registry-certificates\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.215002 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/006f3c7d-5859-4373-97e0-f41fa4317cc3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.215064 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-registry-tls\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.215095 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52bmq\" (UniqueName: \"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-kube-api-access-52bmq\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.215167 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/006f3c7d-5859-4373-97e0-f41fa4317cc3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.215266 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/006f3c7d-5859-4373-97e0-f41fa4317cc3-trusted-ca\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.215320 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-bound-sa-token\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.216336 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/006f3c7d-5859-4373-97e0-f41fa4317cc3-ca-trust-extracted\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.217113 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/006f3c7d-5859-4373-97e0-f41fa4317cc3-registry-certificates\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.217287 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/006f3c7d-5859-4373-97e0-f41fa4317cc3-trusted-ca\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.227244 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/006f3c7d-5859-4373-97e0-f41fa4317cc3-installation-pull-secrets\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.227444 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-registry-tls\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.249797 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52bmq\" (UniqueName: \"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-kube-api-access-52bmq\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.250204 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/006f3c7d-5859-4373-97e0-f41fa4317cc3-bound-sa-token\") pod \"image-registry-66df7c8f76-r297c\" (UID: \"006f3c7d-5859-4373-97e0-f41fa4317cc3\") " pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.274065 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:04 crc kubenswrapper[4919]: I0930 20:20:04.746133 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-r297c"] Sep 30 20:20:05 crc kubenswrapper[4919]: I0930 20:20:05.411282 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-r297c" event={"ID":"006f3c7d-5859-4373-97e0-f41fa4317cc3","Type":"ContainerStarted","Data":"766b69436f45643ed269c6b48c32d4974f8cb4356d8051fa2d730b9684d2d111"} Sep 30 20:20:05 crc kubenswrapper[4919]: I0930 20:20:05.411337 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-r297c" event={"ID":"006f3c7d-5859-4373-97e0-f41fa4317cc3","Type":"ContainerStarted","Data":"1a13555a6d1ef41c634f54e7701b4dd5e5b580e6b62619868ff7efa6411aa3cb"} Sep 30 20:20:05 crc kubenswrapper[4919]: I0930 20:20:05.411466 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:05 crc kubenswrapper[4919]: I0930 20:20:05.454420 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-r297c" podStartSLOduration=2.454393853 podStartE2EDuration="2.454393853s" podCreationTimestamp="2025-09-30 20:20:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:20:05.453803435 +0000 UTC m=+390.569836592" watchObservedRunningTime="2025-09-30 20:20:05.454393853 +0000 UTC m=+390.570427020" Sep 30 20:20:24 crc kubenswrapper[4919]: I0930 20:20:24.282570 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-r297c" Sep 30 20:20:24 crc kubenswrapper[4919]: I0930 20:20:24.357424 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpbl8"] Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.061862 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.062160 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.062210 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.062830 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5a35d751e5b4d240d41cc223a5740a55816947b68cfc97f9ad89a3a19385bfe6"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.062875 4919 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://5a35d751e5b4d240d41cc223a5740a55816947b68cfc97f9ad89a3a19385bfe6" gracePeriod=600 Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.551281 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="5a35d751e5b4d240d41cc223a5740a55816947b68cfc97f9ad89a3a19385bfe6" exitCode=0 Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.551352 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"5a35d751e5b4d240d41cc223a5740a55816947b68cfc97f9ad89a3a19385bfe6"} Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.552106 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"a845129a4d8c98980a8f13a053b2a40edd21a9b836def7381c0f8f8c97e05996"} Sep 30 20:20:26 crc kubenswrapper[4919]: I0930 20:20:26.552189 4919 scope.go:117] "RemoveContainer" containerID="a49b3fe0a8e67222fca5cda5ec85da95115d796f66de94b2ab3fbbd938f562aa" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.414080 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" podUID="6be387e2-3aff-43e1-91bc-bc8257764da1" containerName="registry" containerID="cri-o://6f651912a7ea66d9bee35d136c8df7325d2f88f220256e6c07771545897963bf" gracePeriod=30 Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.716302 4919 generic.go:334] "Generic (PLEG): container finished" podID="6be387e2-3aff-43e1-91bc-bc8257764da1" containerID="6f651912a7ea66d9bee35d136c8df7325d2f88f220256e6c07771545897963bf" exitCode=0 Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.716373 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" event={"ID":"6be387e2-3aff-43e1-91bc-bc8257764da1","Type":"ContainerDied","Data":"6f651912a7ea66d9bee35d136c8df7325d2f88f220256e6c07771545897963bf"} Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.795002 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.953323 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-bound-sa-token\") pod \"6be387e2-3aff-43e1-91bc-bc8257764da1\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.953441 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-certificates\") pod \"6be387e2-3aff-43e1-91bc-bc8257764da1\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.953523 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-tls\") pod \"6be387e2-3aff-43e1-91bc-bc8257764da1\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.953757 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"6be387e2-3aff-43e1-91bc-bc8257764da1\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.953825 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-trusted-ca\") pod \"6be387e2-3aff-43e1-91bc-bc8257764da1\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.953867 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27jn7\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-kube-api-access-27jn7\") pod \"6be387e2-3aff-43e1-91bc-bc8257764da1\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.953943 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6be387e2-3aff-43e1-91bc-bc8257764da1-ca-trust-extracted\") pod \"6be387e2-3aff-43e1-91bc-bc8257764da1\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.953982 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6be387e2-3aff-43e1-91bc-bc8257764da1-installation-pull-secrets\") pod \"6be387e2-3aff-43e1-91bc-bc8257764da1\" (UID: \"6be387e2-3aff-43e1-91bc-bc8257764da1\") " Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.954960 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "6be387e2-3aff-43e1-91bc-bc8257764da1" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.955977 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "6be387e2-3aff-43e1-91bc-bc8257764da1" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.968000 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "6be387e2-3aff-43e1-91bc-bc8257764da1" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.968130 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6be387e2-3aff-43e1-91bc-bc8257764da1-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "6be387e2-3aff-43e1-91bc-bc8257764da1" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.968271 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "6be387e2-3aff-43e1-91bc-bc8257764da1" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.968684 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-kube-api-access-27jn7" (OuterVolumeSpecName: "kube-api-access-27jn7") pod "6be387e2-3aff-43e1-91bc-bc8257764da1" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1"). InnerVolumeSpecName "kube-api-access-27jn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.970723 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "6be387e2-3aff-43e1-91bc-bc8257764da1" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:20:49 crc kubenswrapper[4919]: I0930 20:20:49.990648 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6be387e2-3aff-43e1-91bc-bc8257764da1-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "6be387e2-3aff-43e1-91bc-bc8257764da1" (UID: "6be387e2-3aff-43e1-91bc-bc8257764da1"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.055640 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.055692 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27jn7\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-kube-api-access-27jn7\") on node \"crc\" DevicePath \"\"" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.055715 4919 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/6be387e2-3aff-43e1-91bc-bc8257764da1-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.055743 4919 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/6be387e2-3aff-43e1-91bc-bc8257764da1-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.055766 4919 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.055792 4919 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.055828 4919 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/6be387e2-3aff-43e1-91bc-bc8257764da1-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.726169 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" event={"ID":"6be387e2-3aff-43e1-91bc-bc8257764da1","Type":"ContainerDied","Data":"eba7282bb6685f3d0fe2caf7b68c3cf7d1399cd18719fb9706b632e9c7928509"} Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.726292 4919 scope.go:117] "RemoveContainer" containerID="6f651912a7ea66d9bee35d136c8df7325d2f88f220256e6c07771545897963bf" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.727469 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zpbl8" Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.781899 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpbl8"] Sep 30 20:20:50 crc kubenswrapper[4919]: I0930 20:20:50.786516 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zpbl8"] Sep 30 20:20:50 crc kubenswrapper[4919]: E0930 20:20:50.820635 4919 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6be387e2_3aff_43e1_91bc_bc8257764da1.slice\": RecentStats: unable to find data in memory cache]" Sep 30 20:20:51 crc kubenswrapper[4919]: I0930 20:20:51.645600 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6be387e2-3aff-43e1-91bc-bc8257764da1" path="/var/lib/kubelet/pods/6be387e2-3aff-43e1-91bc-bc8257764da1/volumes" Sep 30 20:22:26 crc kubenswrapper[4919]: I0930 20:22:26.062833 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:22:26 crc kubenswrapper[4919]: I0930 20:22:26.063551 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:22:56 crc kubenswrapper[4919]: I0930 20:22:56.062289 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:22:56 crc kubenswrapper[4919]: I0930 20:22:56.062969 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.173790 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-gvrpb"] Sep 30 20:23:15 crc kubenswrapper[4919]: E0930 20:23:15.176389 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6be387e2-3aff-43e1-91bc-bc8257764da1" containerName="registry" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.176410 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="6be387e2-3aff-43e1-91bc-bc8257764da1" containerName="registry" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.176538 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="6be387e2-3aff-43e1-91bc-bc8257764da1" containerName="registry" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.176871 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-gvrpb" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.182286 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pdg2j"] Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.182788 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.182902 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.182818 4919 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-x288t" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.183088 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-pdg2j" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.188409 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qqlgt"] Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.189043 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-gvrpb"] Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.189384 4919 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-pjsds" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.190026 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.192904 4919 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-5642m" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.202352 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qqlgt"] Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.205484 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pdg2j"] Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.293994 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5cst\" (UniqueName: \"kubernetes.io/projected/5d98bfd5-9d79-4bc5-9525-b9dae37efe66-kube-api-access-l5cst\") pod \"cert-manager-cainjector-7f985d654d-gvrpb\" (UID: \"5d98bfd5-9d79-4bc5-9525-b9dae37efe66\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-gvrpb" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.294310 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5pcs\" (UniqueName: \"kubernetes.io/projected/5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74-kube-api-access-s5pcs\") pod \"cert-manager-5b446d88c5-pdg2j\" (UID: \"5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74\") " pod="cert-manager/cert-manager-5b446d88c5-pdg2j" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.294424 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxkqq\" (UniqueName: \"kubernetes.io/projected/8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc-kube-api-access-gxkqq\") pod \"cert-manager-webhook-5655c58dd6-qqlgt\" (UID: \"8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" Sep 30 20:23:15 
crc kubenswrapper[4919]: I0930 20:23:15.395987 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5pcs\" (UniqueName: \"kubernetes.io/projected/5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74-kube-api-access-s5pcs\") pod \"cert-manager-5b446d88c5-pdg2j\" (UID: \"5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74\") " pod="cert-manager/cert-manager-5b446d88c5-pdg2j" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.396050 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxkqq\" (UniqueName: \"kubernetes.io/projected/8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc-kube-api-access-gxkqq\") pod \"cert-manager-webhook-5655c58dd6-qqlgt\" (UID: \"8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.396187 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5cst\" (UniqueName: \"kubernetes.io/projected/5d98bfd5-9d79-4bc5-9525-b9dae37efe66-kube-api-access-l5cst\") pod \"cert-manager-cainjector-7f985d654d-gvrpb\" (UID: \"5d98bfd5-9d79-4bc5-9525-b9dae37efe66\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-gvrpb" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.418314 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5pcs\" (UniqueName: \"kubernetes.io/projected/5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74-kube-api-access-s5pcs\") pod \"cert-manager-5b446d88c5-pdg2j\" (UID: \"5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74\") " pod="cert-manager/cert-manager-5b446d88c5-pdg2j" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.424830 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxkqq\" (UniqueName: \"kubernetes.io/projected/8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc-kube-api-access-gxkqq\") pod \"cert-manager-webhook-5655c58dd6-qqlgt\" (UID: \"8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.430332 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5cst\" (UniqueName: \"kubernetes.io/projected/5d98bfd5-9d79-4bc5-9525-b9dae37efe66-kube-api-access-l5cst\") pod \"cert-manager-cainjector-7f985d654d-gvrpb\" (UID: \"5d98bfd5-9d79-4bc5-9525-b9dae37efe66\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-gvrpb" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.494769 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-gvrpb" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.506707 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-pdg2j" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.512773 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.713695 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-gvrpb"] Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.727341 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.772804 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-qqlgt"] Sep 30 20:23:15 crc kubenswrapper[4919]: W0930 20:23:15.776946 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8273c2ae_c5d8_4e76_bc5b_ca2c4bce93dc.slice/crio-0bdfdc4c1ba8a6f87ed2b6b9d2caaf4f7ef216caaded606a673f46ac488a6643 WatchSource:0}: Error finding container 0bdfdc4c1ba8a6f87ed2b6b9d2caaf4f7ef216caaded606a673f46ac488a6643: Status 404 returned error can't find the container with id 0bdfdc4c1ba8a6f87ed2b6b9d2caaf4f7ef216caaded606a673f46ac488a6643 Sep 30 20:23:15 crc kubenswrapper[4919]: I0930 20:23:15.993526 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-pdg2j"] Sep 30 20:23:16 crc kubenswrapper[4919]: W0930 20:23:16.000465 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a0a267a_c9dc_41f4_bbfe_9ca579ac8c74.slice/crio-e1a3528b017b484c43db89bd431890ebd65183a7333c053019105900cc2bc387 WatchSource:0}: Error finding container e1a3528b017b484c43db89bd431890ebd65183a7333c053019105900cc2bc387: Status 404 returned error can't find the container with id e1a3528b017b484c43db89bd431890ebd65183a7333c053019105900cc2bc387 Sep 30 20:23:16 crc kubenswrapper[4919]: I0930 20:23:16.722278 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-pdg2j" event={"ID":"5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74","Type":"ContainerStarted","Data":"e1a3528b017b484c43db89bd431890ebd65183a7333c053019105900cc2bc387"} Sep 30 20:23:16 crc kubenswrapper[4919]: I0930 20:23:16.723525 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" event={"ID":"8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc","Type":"ContainerStarted","Data":"0bdfdc4c1ba8a6f87ed2b6b9d2caaf4f7ef216caaded606a673f46ac488a6643"} Sep 30 20:23:16 crc kubenswrapper[4919]: I0930 20:23:16.727514 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-gvrpb" event={"ID":"5d98bfd5-9d79-4bc5-9525-b9dae37efe66","Type":"ContainerStarted","Data":"29126eca41f8af7c471aade22ac2e92cb1941837e4ce4952bed42a343f9cfdb9"} Sep 30 20:23:18 crc kubenswrapper[4919]: I0930 20:23:18.747943 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-gvrpb" event={"ID":"5d98bfd5-9d79-4bc5-9525-b9dae37efe66","Type":"ContainerStarted","Data":"c518806634323a70a1bb6abb8e498e7205c6801f5031536bad2a2ba15988cbe6"} Sep 30 20:23:18 crc kubenswrapper[4919]: I0930 20:23:18.770608 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-gvrpb" podStartSLOduration=1.738775027 podStartE2EDuration="3.770592595s" podCreationTimestamp="2025-09-30 20:23:15 +0000 UTC" firstStartedPulling="2025-09-30 20:23:15.726977098 +0000 UTC m=+580.843010235" 
lastFinishedPulling="2025-09-30 20:23:17.758794676 +0000 UTC m=+582.874827803" observedRunningTime="2025-09-30 20:23:18.768563466 +0000 UTC m=+583.884596593" watchObservedRunningTime="2025-09-30 20:23:18.770592595 +0000 UTC m=+583.886625722" Sep 30 20:23:19 crc kubenswrapper[4919]: I0930 20:23:19.758896 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-pdg2j" event={"ID":"5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74","Type":"ContainerStarted","Data":"e47a2dfb6cb2d258827fe274bb6e80f605c73679d9d727b035d7722677fff36c"} Sep 30 20:23:19 crc kubenswrapper[4919]: I0930 20:23:19.762683 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" event={"ID":"8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc","Type":"ContainerStarted","Data":"56a3086bacecf9063091cb7c221677490de5e27491565bed5d00a2e2abf011fe"} Sep 30 20:23:19 crc kubenswrapper[4919]: I0930 20:23:19.810547 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-pdg2j" podStartSLOduration=1.599614434 podStartE2EDuration="4.810522562s" podCreationTimestamp="2025-09-30 20:23:15 +0000 UTC" firstStartedPulling="2025-09-30 20:23:16.00357533 +0000 UTC m=+581.119608457" lastFinishedPulling="2025-09-30 20:23:19.214483458 +0000 UTC m=+584.330516585" observedRunningTime="2025-09-30 20:23:19.807021401 +0000 UTC m=+584.923054558" watchObservedRunningTime="2025-09-30 20:23:19.810522562 +0000 UTC m=+584.926555729" Sep 30 20:23:19 crc kubenswrapper[4919]: I0930 20:23:19.838467 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" podStartSLOduration=1.417341489 podStartE2EDuration="4.838440344s" podCreationTimestamp="2025-09-30 20:23:15 +0000 UTC" firstStartedPulling="2025-09-30 20:23:15.77902237 +0000 UTC m=+580.895055497" lastFinishedPulling="2025-09-30 20:23:19.200121225 +0000 UTC m=+584.316154352" observedRunningTime="2025-09-30 20:23:19.835108438 +0000 UTC m=+584.951141595" watchObservedRunningTime="2025-09-30 20:23:19.838440344 +0000 UTC m=+584.954473501" Sep 30 20:23:20 crc kubenswrapper[4919]: I0930 20:23:20.513337 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.516905 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-qqlgt" Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.793500 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4p25c"] Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.794727 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="sbdb" containerID="cri-o://8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826" gracePeriod=30 Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.794950 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459" gracePeriod=30 Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.795048 4919 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="northd" containerID="cri-o://3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e" gracePeriod=30 Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.795133 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovn-acl-logging" containerID="cri-o://42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44" gracePeriod=30 Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.795131 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kube-rbac-proxy-node" containerID="cri-o://e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851" gracePeriod=30 Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.795313 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="nbdb" containerID="cri-o://c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1" gracePeriod=30 Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.794613 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovn-controller" containerID="cri-o://ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba" gracePeriod=30 Sep 30 20:23:25 crc kubenswrapper[4919]: I0930 20:23:25.840048 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" containerID="cri-o://70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1" gracePeriod=30 Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.061579 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.061919 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.061960 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.062413 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a845129a4d8c98980a8f13a053b2a40edd21a9b836def7381c0f8f8c97e05996"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.062472 4919 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://a845129a4d8c98980a8f13a053b2a40edd21a9b836def7381c0f8f8c97e05996" gracePeriod=600 Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.174959 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/3.log" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.178087 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovn-acl-logging/0.log" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.178831 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovn-controller/0.log" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.179473 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.247588 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-jr4hc"] Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.247882 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="nbdb" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.247909 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="nbdb" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.247940 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.247952 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.247965 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="sbdb" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.247978 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="sbdb" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.247995 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="northd" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248007 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="northd" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248021 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovn-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248033 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovn-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248053 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc 
kubenswrapper[4919]: I0930 20:23:26.248065 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248078 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248090 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248107 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovn-acl-logging" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248120 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovn-acl-logging" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248135 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kube-rbac-proxy-node" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248146 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kube-rbac-proxy-node" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248159 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248171 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248188 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248202 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248243 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kubecfg-setup" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248255 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kubecfg-setup" Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.248272 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248284 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248444 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248461 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kube-rbac-proxy-node" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248478 4919 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="northd" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248492 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovn-acl-logging" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248507 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248523 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="sbdb" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248539 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="kube-rbac-proxy-ovn-metrics" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248556 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248567 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248581 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovn-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.248597 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="nbdb" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249097 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af48d482-2587-4521-ba91-56d35b0e487d" containerName="ovnkube-controller" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249183 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-config\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249235 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-script-lib\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249260 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-systemd\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249286 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-kubelet\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249329 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-log-socket\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249414 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249473 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-openvswitch\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249539 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249597 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249636 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-env-overrides\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249619 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-log-socket" (OuterVolumeSpecName: "log-socket") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249686 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249657 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-bin\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249738 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af48d482-2587-4521-ba91-56d35b0e487d-ovn-node-metrics-cert\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249667 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249783 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.249790 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250049 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250023 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250175 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-ovn\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250223 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-var-lib-openvswitch\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250244 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-netns\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250260 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-etc-openvswitch\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250282 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-node-log\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250299 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jx9g\" (UniqueName: \"kubernetes.io/projected/af48d482-2587-4521-ba91-56d35b0e487d-kube-api-access-9jx9g\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250290 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250325 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-netd\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250332 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-node-log" (OuterVolumeSpecName: "node-log") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250340 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-slash\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250361 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-ovn-kubernetes\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250356 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250355 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250391 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-systemd-units\") pod \"af48d482-2587-4521-ba91-56d35b0e487d\" (UID: \"af48d482-2587-4521-ba91-56d35b0e487d\") " Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250416 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250426 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-slash" (OuterVolumeSpecName: "host-slash") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250425 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.250443 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251015 4919 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251048 4919 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251072 4919 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251094 4919 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251113 4919 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251135 4919 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251153 4919 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251171 4919 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251189 4919 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-node-log\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251207 4919 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251266 4919 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-slash\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251326 4919 
reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251355 4919 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251380 4919 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251406 4919 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/af48d482-2587-4521-ba91-56d35b0e487d-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251433 4919 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251456 4919 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-log-socket\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.251547 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.255852 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af48d482-2587-4521-ba91-56d35b0e487d-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.262844 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af48d482-2587-4521-ba91-56d35b0e487d-kube-api-access-9jx9g" (OuterVolumeSpecName: "kube-api-access-9jx9g") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "kube-api-access-9jx9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.281197 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "af48d482-2587-4521-ba91-56d35b0e487d" (UID: "af48d482-2587-4521-ba91-56d35b0e487d"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352582 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-slash\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352627 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-cni-bin\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352653 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-systemd\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352675 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a137a24-65f1-4414-8617-03504e56b5e7-ovn-node-metrics-cert\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352695 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-systemd-units\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352716 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-run-netns\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352754 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-ovnkube-config\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352776 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-etc-openvswitch\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352791 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-var-lib-openvswitch\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352809 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-ovnkube-script-lib\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352827 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-run-ovn-kubernetes\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352942 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-node-log\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.352993 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz85f\" (UniqueName: \"kubernetes.io/projected/8a137a24-65f1-4414-8617-03504e56b5e7-kube-api-access-rz85f\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353016 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-env-overrides\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353037 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-kubelet\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353050 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-log-socket\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353073 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 
20:23:26.353093 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-openvswitch\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353112 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-cni-netd\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353135 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-ovn\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353233 4919 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/af48d482-2587-4521-ba91-56d35b0e487d-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353247 4919 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/af48d482-2587-4521-ba91-56d35b0e487d-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.353256 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jx9g\" (UniqueName: \"kubernetes.io/projected/af48d482-2587-4521-ba91-56d35b0e487d-kube-api-access-9jx9g\") on node \"crc\" DevicePath \"\"" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454392 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-systemd\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454464 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a137a24-65f1-4414-8617-03504e56b5e7-ovn-node-metrics-cert\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454498 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-systemd-units\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454514 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-systemd\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 
20:23:26.454573 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-run-netns\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454578 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-systemd-units\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454534 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-run-netns\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454649 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-ovnkube-config\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454689 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-var-lib-openvswitch\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454720 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-etc-openvswitch\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454759 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-ovnkube-script-lib\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454791 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-run-ovn-kubernetes\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454826 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-node-log\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454832 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-etc-openvswitch\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454859 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz85f\" (UniqueName: \"kubernetes.io/projected/8a137a24-65f1-4414-8617-03504e56b5e7-kube-api-access-rz85f\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454876 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-run-ovn-kubernetes\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454891 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-env-overrides\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454900 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-node-log\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454956 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-kubelet\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454924 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-kubelet\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455003 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-log-socket\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455036 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455070 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-openvswitch\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455097 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-cni-netd\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455134 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-ovn\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455178 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-slash\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455230 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.454845 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-var-lib-openvswitch\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455281 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-log-socket\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455313 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-cni-netd\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455319 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-cni-bin\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455341 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-openvswitch\") pod \"ovnkube-node-jr4hc\" 
(UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455410 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-cni-bin\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455437 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-host-slash\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455460 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8a137a24-65f1-4414-8617-03504e56b5e7-run-ovn\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455861 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-ovnkube-script-lib\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455966 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-ovnkube-config\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.455982 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8a137a24-65f1-4414-8617-03504e56b5e7-env-overrides\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.460033 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8a137a24-65f1-4414-8617-03504e56b5e7-ovn-node-metrics-cert\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.482262 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz85f\" (UniqueName: \"kubernetes.io/projected/8a137a24-65f1-4414-8617-03504e56b5e7-kube-api-access-rz85f\") pod \"ovnkube-node-jr4hc\" (UID: \"8a137a24-65f1-4414-8617-03504e56b5e7\") " pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.589893 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:23:26 crc kubenswrapper[4919]: W0930 20:23:26.617871 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a137a24_65f1_4414_8617_03504e56b5e7.slice/crio-cbc5b2e2199c336df307344ada91fd499393a163700f2c43be8a9400caca922c WatchSource:0}: Error finding container cbc5b2e2199c336df307344ada91fd499393a163700f2c43be8a9400caca922c: Status 404 returned error can't find the container with id cbc5b2e2199c336df307344ada91fd499393a163700f2c43be8a9400caca922c Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.809265 4919 generic.go:334] "Generic (PLEG): container finished" podID="8a137a24-65f1-4414-8617-03504e56b5e7" containerID="bf635c6cbeba1e987ae35596b4d1fc14eaa44d49639de265a67b9f5d6b8b7b82" exitCode=0 Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.809341 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerDied","Data":"bf635c6cbeba1e987ae35596b4d1fc14eaa44d49639de265a67b9f5d6b8b7b82"} Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.809371 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"cbc5b2e2199c336df307344ada91fd499393a163700f2c43be8a9400caca922c"} Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.813382 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="a845129a4d8c98980a8f13a053b2a40edd21a9b836def7381c0f8f8c97e05996" exitCode=0 Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.813457 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"a845129a4d8c98980a8f13a053b2a40edd21a9b836def7381c0f8f8c97e05996"} Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.813497 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"233411e098bbdd508df400a23be94bf9227b0271eb6d0d9c0dd1c95d19986660"} Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.813523 4919 scope.go:117] "RemoveContainer" containerID="5a35d751e5b4d240d41cc223a5740a55816947b68cfc97f9ad89a3a19385bfe6" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.819925 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovnkube-controller/3.log" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.829774 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovn-acl-logging/0.log" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833135 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-4p25c_af48d482-2587-4521-ba91-56d35b0e487d/ovn-controller/0.log" Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833866 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1" 
exitCode=0
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833901 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826" exitCode=0
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833915 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1" exitCode=0
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833928 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e" exitCode=0
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833940 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459" exitCode=0
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833951 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851" exitCode=0
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833964 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44" exitCode=143
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.833975 4919 generic.go:334] "Generic (PLEG): container finished" podID="af48d482-2587-4521-ba91-56d35b0e487d" containerID="ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba" exitCode=143
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834031 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834067 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834084 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834101 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834117 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834133 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834149 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834165 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834174 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834183 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834192 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834202 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834273 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834285 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834294 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834306 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834319 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834334 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834345 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834354 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834364 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834373 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834381 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834390 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834399 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834409 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834418 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834431 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834444 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834455 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834463 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834471 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834480 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834489 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834498 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834507 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834517 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834525 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834537 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c" event={"ID":"af48d482-2587-4521-ba91-56d35b0e487d","Type":"ContainerDied","Data":"d4fcb3b0835be6a2d0e8731449093aefd3e1f127203cadd09a92a3eb443ec0f9"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834550 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834560 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834569 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834578 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834587 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834596 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834604 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834612 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834621 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834630 4919 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.834765 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-4p25c"
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.847141 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/2.log"
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.847955 4919 scope.go:117] "RemoveContainer" containerID="70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.848723 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/1.log"
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.848788 4919 generic.go:334] "Generic (PLEG): container finished" podID="e3e33a72-0a49-4944-a2c2-ac16183942cf" containerID="87b1b6f7c7b9294d3e1f2cf6de4cc7f91699916fcbc8ea3e63c60eeede5a5879" exitCode=2
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.848838 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c5crr" event={"ID":"e3e33a72-0a49-4944-a2c2-ac16183942cf","Type":"ContainerDied","Data":"87b1b6f7c7b9294d3e1f2cf6de4cc7f91699916fcbc8ea3e63c60eeede5a5879"}
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.849678 4919 scope.go:117] "RemoveContainer" containerID="87b1b6f7c7b9294d3e1f2cf6de4cc7f91699916fcbc8ea3e63c60eeede5a5879"
Sep 30 20:23:26 crc kubenswrapper[4919]: E0930 20:23:26.850059 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-c5crr_openshift-multus(e3e33a72-0a49-4944-a2c2-ac16183942cf)\"" pod="openshift-multus/multus-c5crr" podUID="e3e33a72-0a49-4944-a2c2-ac16183942cf"
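The two multus entries above show the kubelet's CrashLoopBackOff throttling: kube-multus exited with code 2, and its next restart is delayed by "back-off 20s". The kubelet's documented back-off starts at 10s and doubles on each consecutive failure, capped at 5m, so a 20s delay indicates the second failure in a row. A minimal sketch of that schedule in Go (illustrative, not kubelet source; the 10s base and 5m cap are the kubelet defaults):

package main

import (
	"fmt"
	"time"
)

// crashLoopDelays sketches the kubelet CrashLoopBackOff schedule: the
// restart delay starts at base and doubles after each consecutive
// failure, saturating at max (kubelet defaults: 10s base, 5m cap).
func crashLoopDelays(failures int, base, max time.Duration) []time.Duration {
	out := make([]time.Duration, 0, failures)
	d := base
	for i := 0; i < failures; i++ {
		out = append(out, d)
		if d *= 2; d > max {
			d = max
		}
	}
	return out
}

func main() {
	// The "back-off 20s" in the entry above is the second element here.
	fmt.Println(crashLoopDelays(7, 10*time.Second, 5*time.Minute))
	// Output: [10s 20s 40s 1m20s 2m40s 5m0s 5m0s]
}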
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.879302 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.896307 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4p25c"]
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.901900 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-4p25c"]
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.950272 4919 scope.go:117] "RemoveContainer" containerID="8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.971293 4919 scope.go:117] "RemoveContainer" containerID="c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"
Sep 30 20:23:26 crc kubenswrapper[4919]: I0930 20:23:26.987016 4919 scope.go:117] "RemoveContainer" containerID="3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.001534 4919 scope.go:117] "RemoveContainer" containerID="391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.018645 4919 scope.go:117] "RemoveContainer" containerID="e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.052639 4919 scope.go:117] "RemoveContainer" containerID="42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.070066 4919 scope.go:117] "RemoveContainer" containerID="ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.084391 4919 scope.go:117] "RemoveContainer" containerID="fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.097437 4919 scope.go:117] "RemoveContainer" containerID="70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.098100 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": container with ID starting with 70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1 not found: ID does not exist" containerID="70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.098163 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"} err="failed to get container status \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": rpc error: code = NotFound desc = could not find container \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": container with ID starting with 70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.098202 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.098779 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": container with ID starting with 01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce not found: ID does not exist" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.098822 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"} err="failed to get container status \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": rpc error: code = NotFound desc = could not find container \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": container with ID starting with 01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.098848 4919 scope.go:117] "RemoveContainer" containerID="8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.099189 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": container with ID starting with 8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826 not found: ID does not exist" containerID="8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.099226 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"} err="failed to get container status \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": rpc error: code = NotFound desc = could not find container \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": container with ID starting with 8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.099243 4919 scope.go:117] "RemoveContainer" containerID="c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.099612 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": container with ID starting with c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1 not found: ID does not exist" containerID="c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.099646 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"} err="failed to get container status \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": rpc error: code = NotFound desc = could not find container \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": container with ID starting with c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.099666 4919 scope.go:117] "RemoveContainer" containerID="3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.099956 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": container with ID starting with 3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e not found: ID does not exist" containerID="3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.099985 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"} err="failed to get container status \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": rpc error: code = NotFound desc = could not find container \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": container with ID starting with 3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.100001 4919 scope.go:117] "RemoveContainer" containerID="391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.100301 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": container with ID starting with 391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459 not found: ID does not exist" containerID="391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.100325 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"} err="failed to get container status \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": rpc error: code = NotFound desc = could not find container \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": container with ID starting with 391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.100340 4919 scope.go:117] "RemoveContainer" containerID="e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.100604 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": container with ID starting with e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851 not found: ID does not exist" containerID="e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.100629 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"} err="failed to get container status \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": rpc error: code = NotFound desc = could not find container \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": container with ID starting with e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.100644 4919 scope.go:117] "RemoveContainer" containerID="42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.100916 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": container with ID starting with 42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44 not found: ID does not exist" containerID="42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.100964 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"} err="failed to get container status \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": rpc error: code = NotFound desc = could not find container \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": container with ID starting with 42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.100980 4919 scope.go:117] "RemoveContainer" containerID="ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.101281 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": container with ID starting with ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba not found: ID does not exist" containerID="ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.101301 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"} err="failed to get container status \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": rpc error: code = NotFound desc = could not find container \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": container with ID starting with ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.101326 4919 scope.go:117] "RemoveContainer" containerID="fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"
Sep 30 20:23:27 crc kubenswrapper[4919]: E0930 20:23:27.101745 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": container with ID starting with fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341 not found: ID does not exist" containerID="fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.101828 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"} err="failed to get container status \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": rpc error: code = NotFound desc = could not find container \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": container with ID starting with fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.101859 4919 scope.go:117] "RemoveContainer" containerID="70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.102318 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"} err="failed to get container status \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": rpc error: code = NotFound desc = could not find container \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": container with ID starting with 70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.102346 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.102611 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"} err="failed to get container status \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": rpc error: code = NotFound desc = could not find container \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": container with ID starting with 01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.102634 4919 scope.go:117] "RemoveContainer" containerID="8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.102914 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"} err="failed to get container status \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": rpc error: code = NotFound desc = could not find container \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": container with ID starting with 8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.102949 4919 scope.go:117] "RemoveContainer" containerID="c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.103393 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"} err="failed to get container status \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": rpc error: code = NotFound desc = could not find container \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": container with ID starting with c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.103414 4919 scope.go:117] "RemoveContainer" containerID="3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.103707 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"} err="failed to get container status \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": rpc error: code = NotFound desc = could not find container \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": container with ID starting with 3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.103742 4919 scope.go:117] "RemoveContainer" containerID="391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.104012 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"} err="failed to get container status \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": rpc error: code = NotFound desc = could not find container \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": container with ID starting with 391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.104048 4919 scope.go:117] "RemoveContainer" containerID="e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.104419 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"} err="failed to get container status \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": rpc error: code = NotFound desc = could not find container \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": container with ID starting with e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.104464 4919 scope.go:117] "RemoveContainer" containerID="42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.104833 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"} err="failed to get container status \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": rpc error: code = NotFound desc = could not find container \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": container with ID starting with 42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.104853 4919 scope.go:117] "RemoveContainer" containerID="ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.105071 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"} err="failed to get container status \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": rpc error: code = NotFound desc = could not find container \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": container with ID starting with ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.105092 4919 scope.go:117] "RemoveContainer" containerID="fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.105685 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"} err="failed to get container status \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": rpc error: code = NotFound desc = could not find container \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": container with ID starting with fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.105731 4919 scope.go:117] "RemoveContainer" containerID="70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.106023 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"} err="failed to get container status \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": rpc error: code = NotFound desc = could not find container \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": container with ID starting with 70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.106048 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.108281 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"} err="failed to get container status \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": rpc error: code = NotFound desc = could not find container \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": container with ID starting with 01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.108357 4919 scope.go:117] "RemoveContainer" containerID="8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.109366 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"} err="failed to get container status \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": rpc error: code = NotFound desc = could not find container \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": container with ID starting with 8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.109448 4919 scope.go:117] "RemoveContainer" containerID="c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.110117 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"} err="failed to get container status \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": rpc error: code = NotFound desc = could not find container \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": container with ID starting with c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.110166 4919 scope.go:117] "RemoveContainer" containerID="3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.110501 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"} err="failed to get container status \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": rpc error: code = NotFound desc = could not find container \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": container with ID starting with 3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.110529 4919 scope.go:117] "RemoveContainer" containerID="391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.110861 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"} err="failed to get container status \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": rpc error: code = NotFound desc = could not find container \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": container with ID starting with 391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.110967 4919 scope.go:117] "RemoveContainer" containerID="e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.112320 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"} err="failed to get container status \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": rpc error: code = NotFound desc = could not find container \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": container with ID starting with e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.112347 4919 scope.go:117] "RemoveContainer" containerID="42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.113098 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"} err="failed to get container status \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": rpc error: code = NotFound desc = could not find container \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": container with ID starting with 42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.113132 4919 scope.go:117] "RemoveContainer" containerID="ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.113792 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"} err="failed to get container status \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": rpc error: code = NotFound desc = could not find container \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": container with ID starting with ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.113837 4919 scope.go:117] "RemoveContainer" containerID="fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.114187 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"} err="failed to get container status \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": rpc error: code = NotFound desc = could not find container \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": container with ID starting with fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.114292 4919 scope.go:117] "RemoveContainer" containerID="70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.114782 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1"} err="failed to get container status \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": rpc error: code = NotFound desc = could not find container \"70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1\": container with ID starting with 70f07a6dc73ad20245740ca55bd5761d11822dbbf29e8e6c9faf6bb9f5c6f6f1 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.114810 4919 scope.go:117] "RemoveContainer" containerID="01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.115288 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce"} err="failed to get container status \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": rpc error: code = NotFound desc = could not find container \"01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce\": container with ID starting with 01a9491e04eea152828b537628b703777b9ac0109888fc85a8a34b881a2883ce not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.115393 4919 scope.go:117] "RemoveContainer" containerID="8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.115791 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826"} err="failed to get container status \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": rpc error: code = NotFound desc = could not find container \"8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826\": container with ID starting with 8542839286de97042e5d21f5a0234925159e57a43f63606f55cacf5285740826 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.115818 4919 scope.go:117] "RemoveContainer" containerID="c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.116205 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1"} err="failed to get container status \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": rpc error: code = NotFound desc = could not find container \"c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1\": container with ID starting with c490936a2dbf168a11c9d037432c4e9c8b1b075f9a3ed60ae64ecbf7d1b599c1 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.116259 4919 scope.go:117] "RemoveContainer" containerID="3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.116571 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e"} err="failed to get container status \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": rpc error: code = NotFound desc = could not find container \"3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e\": container with ID starting with 3e11911212c2b7282333fc283eca270fadc7c943c2b0195ee0e30d983778027e not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.116613 4919 scope.go:117] "RemoveContainer" containerID="391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.117254 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459"} err="failed to get container status \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": rpc error: code = NotFound desc = could not find container \"391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459\": container with ID starting with 391b7a4ae93b43bf83125bc12579814bea6f65f12f81edf106624984473e2459 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.117293 4919 scope.go:117] "RemoveContainer" containerID="e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.117633 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851"} err="failed to get container status \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": rpc error: code = NotFound desc = could not find container \"e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851\": container with ID starting with e7fd0ec0487547c771421df836ee3d35bf74a0d2fd93806b40e4005543e42851 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.117679 4919 scope.go:117] "RemoveContainer" containerID="42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.118067 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44"} err="failed to get container status \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": rpc error: code = NotFound desc = could not find container \"42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44\": container with ID starting with 42f6162585671330ba628db78d74a8df9f3ad838c6b90a9d904857d039ff1c44 not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.118107 4919 scope.go:117] "RemoveContainer" containerID="ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.118375 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba"} err="failed to get container status \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": rpc error: code = NotFound desc = could not find container \"ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba\": container with ID starting with ffb65d99813d045926307ef061a7250df84034642db87cc25dd7c4fc18e162ba not found: ID does not exist"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.118397 4919 scope.go:117] "RemoveContainer" containerID="fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.120671 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341"} err="failed to get container status \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": rpc error: code = NotFound desc = could not find container \"fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341\": container with ID starting with fe2a8330d05a63df3fb0a282f19c662535532771f78679a1deb9c79693825341 not found: ID does not exist"
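Every "RemoveContainer" / "DeleteContainer returned error" pair in the long run above is the kubelet re-requesting deletion of a container that CRI-O has already pruned; the runtime answers with gRPC NotFound, which is benign during teardown of ovnkube-node-4p25c. A sketch of the usual idempotent-delete pattern behind logging these at info level instead of failing the sync (illustrative Go against any gRPC-backed CRI client; not kubelet source):

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats gRPC NotFound as success so that repeated
// deletes of an already-removed container stay idempotent.
func removeContainer(ctx context.Context, remove func(context.Context, string) error, id string) error {
	if err := remove(ctx, id); err != nil && status.Code(err) != codes.NotFound {
		return fmt.Errorf("remove container %s: %w", id, err)
	}
	return nil // removed now, or already gone
}

func main() {
	// Fake runtime that has already pruned the container, as CRI-O had here.
	gone := func(ctx context.Context, id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	fmt.Println(removeContainer(context.Background(), gone, "70f07a6d"))
	// Output: <nil>
}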
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.120986 4919 scope.go:117] "RemoveContainer" containerID="aea57d9136d938eb7249c500e655688ca888930b02435845a1dcdf1712da37fb"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.646515 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af48d482-2587-4521-ba91-56d35b0e487d" path="/var/lib/kubelet/pods/af48d482-2587-4521-ba91-56d35b0e487d/volumes"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.869743 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/2.log"
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.877681 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"371fb7edf7554302acfc83ee41a8715acafd030efa36142dba4a8bbb2166b9e1"}
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.877791 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"0837755777d17253018a51c819b256936d876d5fd447408f175bf013f359806e"}
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.877879 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"43765752513be2091a78711ceae90b0db82e17e861b5d138300901668ef71985"}
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.877958 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"d36f83b03500ac94646d8bbb09c2f90dc59f280bf243b677f963b57485363ab8"}
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.877994 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"6bbd1d55aeb60fdac4d67f9cbe276c51f4a0008f64e4166055bb06e91b0bbfc6"}
Sep 30 20:23:27 crc kubenswrapper[4919]: I0930 20:23:27.878080 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"fefd1ac7be64945794ea3bc4f9533d542a2a8e39bf4bebb017d5f154eed73c28"}
Sep 30 20:23:29 crc kubenswrapper[4919]: I0930 20:23:29.900144 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"c4d96229eb8deebf40dc045a21a072e2ea79f3b12317efceac936ff1f0a10531"}
Sep 30 20:23:32 crc kubenswrapper[4919]: I0930 20:23:32.932769 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" event={"ID":"8a137a24-65f1-4414-8617-03504e56b5e7","Type":"ContainerStarted","Data":"b76c58a67076ed34d7e5a2fd9a931c741afd96a377003e1ac8a013a480a73acb"}
Sep 30 20:23:32 crc kubenswrapper[4919]: I0930 20:23:32.934402 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc"
Sep 30 20:23:32 crc kubenswrapper[4919]: I0930 20:23:32.934485 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc"
Sep 30 20:23:32 crc kubenswrapper[4919]: I0930 20:23:32.934508 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc"
Sep 30 20:23:32 crc kubenswrapper[4919]: I0930 20:23:32.966864 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc"
Sep 30 20:23:32 crc kubenswrapper[4919]: I0930 20:23:32.971747 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc"
Sep 30 20:23:32 crc kubenswrapper[4919]: I0930 20:23:32.975846 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" podStartSLOduration=6.975832987 podStartE2EDuration="6.975832987s" podCreationTimestamp="2025-09-30 20:23:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:23:32.975537408 +0000 UTC m=+598.091570625" watchObservedRunningTime="2025-09-30 20:23:32.975832987 +0000 UTC m=+598.091866124"
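The pod_startup_latency_tracker entry above records the replacement ovnkube-node-jr4hc pod going ready 6.975832987s after creation; with no image pulls (the zero firstStartedPulling/lastFinishedPulling values), podStartSLOduration and podStartE2EDuration appear to coincide, and the E2E figure is simply watchObservedRunningTime minus podCreationTimestamp. The subtraction, reproduced as a sketch (the layout string matches Go's default time.Time formatting used in the entry):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the "2025-09-30 20:23:26 +0000 UTC" strings above.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-09-30 20:23:26 +0000 UTC")
	running, _ := time.Parse(layout, "2025-09-30 20:23:32.975832987 +0000 UTC")
	fmt.Println(running.Sub(created)) // Output: 6.975832987s
}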
Sep 30 20:23:38 crc kubenswrapper[4919]: I0930 20:23:38.631933 4919 scope.go:117] "RemoveContainer" containerID="87b1b6f7c7b9294d3e1f2cf6de4cc7f91699916fcbc8ea3e63c60eeede5a5879"
Sep 30 20:23:38 crc kubenswrapper[4919]: E0930 20:23:38.632920 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-c5crr_openshift-multus(e3e33a72-0a49-4944-a2c2-ac16183942cf)\"" pod="openshift-multus/multus-c5crr" podUID="e3e33a72-0a49-4944-a2c2-ac16183942cf"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.632633 4919 scope.go:117] "RemoveContainer" containerID="87b1b6f7c7b9294d3e1f2cf6de4cc7f91699916fcbc8ea3e63c60eeede5a5879"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.745929 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"]
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.747882 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.751080 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.773428 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"]
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.886493 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.886734 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wntlg\" (UniqueName: \"kubernetes.io/projected/f9cc61b3-cd89-4636-a3dd-60788041f808-kube-api-access-wntlg\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.886936 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.968131 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj"]
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.969764 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.983573 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj"]
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.988708 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wntlg\" (UniqueName: \"kubernetes.io/projected/f9cc61b3-cd89-4636-a3dd-60788041f808-kube-api-access-wntlg\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.988812 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.988911 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.989804 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-bundle\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:49 crc kubenswrapper[4919]: I0930 20:23:49.989846 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-util\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.030991 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wntlg\" (UniqueName: \"kubernetes.io/projected/f9cc61b3-cd89-4636-a3dd-60788041f808-kube-api-access-wntlg\") pod \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.040332 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-c5crr_e3e33a72-0a49-4944-a2c2-ac16183942cf/kube-multus/2.log"
Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.040404 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-c5crr" event={"ID":"e3e33a72-0a49-4944-a2c2-ac16183942cf","Type":"ContainerStarted","Data":"518170efc226051add5ba0f77139c36effdc539b2aae8318d52e0a4108982d8e"}
Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.068746 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.090479 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj"
Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.090580 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj"
Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.090776 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7rs5\" (UniqueName: \"kubernetes.io/projected/9a753e04-c280-40ea-bce3-2803f7a30e1d-kube-api-access-h7rs5\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj"
Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.113473 4919 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(5d88fb472e5b78866439159358ce63f70ec9fbce5ec9406df103c2120f329699): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.113992 4919 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(5d88fb472e5b78866439159358ce63f70ec9fbce5ec9406df103c2120f329699): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.114044 4919 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(5d88fb472e5b78866439159358ce63f70ec9fbce5ec9406df103c2120f329699): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"
Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.114149 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace(f9cc61b3-cd89-4636-a3dd-60788041f808)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace(f9cc61b3-cd89-4636-a3dd-60788041f808)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(5d88fb472e5b78866439159358ce63f70ec9fbce5ec9406df103c2120f329699): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808"
pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.114149 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace(f9cc61b3-cd89-4636-a3dd-60788041f808)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace(f9cc61b3-cd89-4636-a3dd-60788041f808)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(5d88fb472e5b78866439159358ce63f70ec9fbce5ec9406df103c2120f329699): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.191933 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7rs5\" (UniqueName: \"kubernetes.io/projected/9a753e04-c280-40ea-bce3-2803f7a30e1d-kube-api-access-h7rs5\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.192069 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.192107 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.192601 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-util\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.192635 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-bundle\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.216030 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7rs5\" (UniqueName: 
\"kubernetes.io/projected/9a753e04-c280-40ea-bce3-2803f7a30e1d-kube-api-access-h7rs5\") pod \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: I0930 20:23:50.299706 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.333490 4919 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace_9a753e04-c280-40ea-bce3-2803f7a30e1d_0(f5696d9910dd19b0af7d565443bb2d8f4829080741662ecdcf0bf9b5a9ee2f97): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.333587 4919 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace_9a753e04-c280-40ea-bce3-2803f7a30e1d_0(f5696d9910dd19b0af7d565443bb2d8f4829080741662ecdcf0bf9b5a9ee2f97): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.333632 4919 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace_9a753e04-c280-40ea-bce3-2803f7a30e1d_0(f5696d9910dd19b0af7d565443bb2d8f4829080741662ecdcf0bf9b5a9ee2f97): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:50 crc kubenswrapper[4919]: E0930 20:23:50.333711 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace(9a753e04-c280-40ea-bce3-2803f7a30e1d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace(9a753e04-c280-40ea-bce3-2803f7a30e1d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace_9a753e04-c280-40ea-bce3-2803f7a30e1d_0(f5696d9910dd19b0af7d565443bb2d8f4829080741662ecdcf0bf9b5a9ee2f97): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" Sep 30 20:23:51 crc kubenswrapper[4919]: I0930 20:23:51.046702 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:51 crc kubenswrapper[4919]: I0930 20:23:51.046788 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:23:51 crc kubenswrapper[4919]: I0930 20:23:51.047448 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:51 crc kubenswrapper[4919]: I0930 20:23:51.047519 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:23:51 crc kubenswrapper[4919]: E0930 20:23:51.100918 4919 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(442dc7df3955c2016315e19f8e58813472c949e223928fa2608761b81a4428ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 20:23:51 crc kubenswrapper[4919]: E0930 20:23:51.100995 4919 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(442dc7df3955c2016315e19f8e58813472c949e223928fa2608761b81a4428ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:23:51 crc kubenswrapper[4919]: E0930 20:23:51.101035 4919 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(442dc7df3955c2016315e19f8e58813472c949e223928fa2608761b81a4428ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:23:51 crc kubenswrapper[4919]: E0930 20:23:51.101145 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace(f9cc61b3-cd89-4636-a3dd-60788041f808)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace(f9cc61b3-cd89-4636-a3dd-60788041f808)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_openshift-marketplace_f9cc61b3-cd89-4636-a3dd-60788041f808_0(442dc7df3955c2016315e19f8e58813472c949e223928fa2608761b81a4428ac): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" Sep 30 20:23:51 crc kubenswrapper[4919]: E0930 20:23:51.117697 4919 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace_9a753e04-c280-40ea-bce3-2803f7a30e1d_0(be7446c753f1f96a292e2c7337c78f6c4f9a997cf2ff4cba22a69fefa3da2e09): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 30 20:23:51 crc kubenswrapper[4919]: E0930 20:23:51.117930 4919 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace_9a753e04-c280-40ea-bce3-2803f7a30e1d_0(be7446c753f1f96a292e2c7337c78f6c4f9a997cf2ff4cba22a69fefa3da2e09): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:51 crc kubenswrapper[4919]: E0930 20:23:51.118092 4919 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace_9a753e04-c280-40ea-bce3-2803f7a30e1d_0(be7446c753f1f96a292e2c7337c78f6c4f9a997cf2ff4cba22a69fefa3da2e09): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:23:51 crc kubenswrapper[4919]: E0930 20:23:51.118333 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace(9a753e04-c280-40ea-bce3-2803f7a30e1d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace(9a753e04-c280-40ea-bce3-2803f7a30e1d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_openshift-marketplace_9a753e04-c280-40ea-bce3-2803f7a30e1d_0(be7446c753f1f96a292e2c7337c78f6c4f9a997cf2ff4cba22a69fefa3da2e09): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" Sep 30 20:23:56 crc kubenswrapper[4919]: I0930 20:23:56.632096 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-jr4hc" Sep 30 20:24:04 crc kubenswrapper[4919]: I0930 20:24:04.631196 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:24:04 crc kubenswrapper[4919]: I0930 20:24:04.631564 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:24:04 crc kubenswrapper[4919]: I0930 20:24:04.632361 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:24:04 crc kubenswrapper[4919]: I0930 20:24:04.632867 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:24:04 crc kubenswrapper[4919]: I0930 20:24:04.922279 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872"] Sep 30 20:24:04 crc kubenswrapper[4919]: I0930 20:24:04.971772 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj"] Sep 30 20:24:04 crc kubenswrapper[4919]: W0930 20:24:04.993734 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a753e04_c280_40ea_bce3_2803f7a30e1d.slice/crio-34f9af73702b23cb1e30ea3efca9b62afcfd4f9d7b0d69afa8bf3062ce1c4f1b WatchSource:0}: Error finding container 34f9af73702b23cb1e30ea3efca9b62afcfd4f9d7b0d69afa8bf3062ce1c4f1b: Status 404 returned error can't find the container with id 34f9af73702b23cb1e30ea3efca9b62afcfd4f9d7b0d69afa8bf3062ce1c4f1b Sep 30 20:24:05 crc kubenswrapper[4919]: I0930 20:24:05.144462 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" event={"ID":"f9cc61b3-cd89-4636-a3dd-60788041f808","Type":"ContainerStarted","Data":"ce15c3345c5aeda6a90af10829b3c283e475ccba9f1933fe5dd5df21936443aa"} Sep 30 20:24:05 crc kubenswrapper[4919]: I0930 20:24:05.144514 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" event={"ID":"f9cc61b3-cd89-4636-a3dd-60788041f808","Type":"ContainerStarted","Data":"04894b8cda2a319030c41dc5b9ee1e1df9503766b25802d0c40117b3feffde52"} Sep 30 20:24:05 crc kubenswrapper[4919]: I0930 20:24:05.147350 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" event={"ID":"9a753e04-c280-40ea-bce3-2803f7a30e1d","Type":"ContainerStarted","Data":"e7b8dbd2abca7753e044542f94828df7f86d4096a79fe6a47888cfe1d73888d3"} Sep 30 20:24:05 crc kubenswrapper[4919]: I0930 20:24:05.147418 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" event={"ID":"9a753e04-c280-40ea-bce3-2803f7a30e1d","Type":"ContainerStarted","Data":"34f9af73702b23cb1e30ea3efca9b62afcfd4f9d7b0d69afa8bf3062ce1c4f1b"} Sep 30 20:24:06 crc kubenswrapper[4919]: I0930 20:24:06.158454 4919 generic.go:334] "Generic (PLEG): container finished" podID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerID="ce15c3345c5aeda6a90af10829b3c283e475ccba9f1933fe5dd5df21936443aa" exitCode=0 Sep 30 20:24:06 crc kubenswrapper[4919]: I0930 20:24:06.158537 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" 
event={"ID":"f9cc61b3-cd89-4636-a3dd-60788041f808","Type":"ContainerDied","Data":"ce15c3345c5aeda6a90af10829b3c283e475ccba9f1933fe5dd5df21936443aa"} Sep 30 20:24:06 crc kubenswrapper[4919]: I0930 20:24:06.161169 4919 generic.go:334] "Generic (PLEG): container finished" podID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerID="e7b8dbd2abca7753e044542f94828df7f86d4096a79fe6a47888cfe1d73888d3" exitCode=0 Sep 30 20:24:06 crc kubenswrapper[4919]: I0930 20:24:06.161251 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" event={"ID":"9a753e04-c280-40ea-bce3-2803f7a30e1d","Type":"ContainerDied","Data":"e7b8dbd2abca7753e044542f94828df7f86d4096a79fe6a47888cfe1d73888d3"} Sep 30 20:24:08 crc kubenswrapper[4919]: I0930 20:24:08.178896 4919 generic.go:334] "Generic (PLEG): container finished" podID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerID="049e66f1c4e0fcbba1a997aed95de3d4acf654d287425ae04aea0d63c34a7f6f" exitCode=0 Sep 30 20:24:08 crc kubenswrapper[4919]: I0930 20:24:08.178995 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" event={"ID":"9a753e04-c280-40ea-bce3-2803f7a30e1d","Type":"ContainerDied","Data":"049e66f1c4e0fcbba1a997aed95de3d4acf654d287425ae04aea0d63c34a7f6f"} Sep 30 20:24:09 crc kubenswrapper[4919]: I0930 20:24:09.198400 4919 generic.go:334] "Generic (PLEG): container finished" podID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerID="3294b09c92e066e30506b692c29401c122befd099eaffc9704a2659f0afd4b89" exitCode=0 Sep 30 20:24:09 crc kubenswrapper[4919]: I0930 20:24:09.198578 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" event={"ID":"f9cc61b3-cd89-4636-a3dd-60788041f808","Type":"ContainerDied","Data":"3294b09c92e066e30506b692c29401c122befd099eaffc9704a2659f0afd4b89"} Sep 30 20:24:09 crc kubenswrapper[4919]: I0930 20:24:09.204863 4919 generic.go:334] "Generic (PLEG): container finished" podID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerID="b91bb5d4f4ab556fbff16f3cf12e7cf9173dcddbadccf5b39917ffd43d252331" exitCode=0 Sep 30 20:24:09 crc kubenswrapper[4919]: I0930 20:24:09.204942 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" event={"ID":"9a753e04-c280-40ea-bce3-2803f7a30e1d","Type":"ContainerDied","Data":"b91bb5d4f4ab556fbff16f3cf12e7cf9173dcddbadccf5b39917ffd43d252331"} Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.216838 4919 generic.go:334] "Generic (PLEG): container finished" podID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerID="a5f70d202354ad95273fa8e6de52d4f05217794631b3c84a87ed5801eae7728d" exitCode=0 Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.216949 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" event={"ID":"f9cc61b3-cd89-4636-a3dd-60788041f808","Type":"ContainerDied","Data":"a5f70d202354ad95273fa8e6de52d4f05217794631b3c84a87ed5801eae7728d"} Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.536127 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.578541 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7rs5\" (UniqueName: \"kubernetes.io/projected/9a753e04-c280-40ea-bce3-2803f7a30e1d-kube-api-access-h7rs5\") pod \"9a753e04-c280-40ea-bce3-2803f7a30e1d\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.578635 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-bundle\") pod \"9a753e04-c280-40ea-bce3-2803f7a30e1d\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.578711 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-util\") pod \"9a753e04-c280-40ea-bce3-2803f7a30e1d\" (UID: \"9a753e04-c280-40ea-bce3-2803f7a30e1d\") " Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.579993 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-bundle" (OuterVolumeSpecName: "bundle") pod "9a753e04-c280-40ea-bce3-2803f7a30e1d" (UID: "9a753e04-c280-40ea-bce3-2803f7a30e1d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.585128 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a753e04-c280-40ea-bce3-2803f7a30e1d-kube-api-access-h7rs5" (OuterVolumeSpecName: "kube-api-access-h7rs5") pod "9a753e04-c280-40ea-bce3-2803f7a30e1d" (UID: "9a753e04-c280-40ea-bce3-2803f7a30e1d"). InnerVolumeSpecName "kube-api-access-h7rs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.680078 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7rs5\" (UniqueName: \"kubernetes.io/projected/9a753e04-c280-40ea-bce3-2803f7a30e1d-kube-api-access-h7rs5\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:10 crc kubenswrapper[4919]: I0930 20:24:10.680120 4919 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.022457 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-util" (OuterVolumeSpecName: "util") pod "9a753e04-c280-40ea-bce3-2803f7a30e1d" (UID: "9a753e04-c280-40ea-bce3-2803f7a30e1d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.086088 4919 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9a753e04-c280-40ea-bce3-2803f7a30e1d-util\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.228674 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" event={"ID":"9a753e04-c280-40ea-bce3-2803f7a30e1d","Type":"ContainerDied","Data":"34f9af73702b23cb1e30ea3efca9b62afcfd4f9d7b0d69afa8bf3062ce1c4f1b"} Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.228716 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.228744 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34f9af73702b23cb1e30ea3efca9b62afcfd4f9d7b0d69afa8bf3062ce1c4f1b" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.537291 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.594549 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-util\") pod \"f9cc61b3-cd89-4636-a3dd-60788041f808\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.594680 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-bundle\") pod \"f9cc61b3-cd89-4636-a3dd-60788041f808\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.594720 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wntlg\" (UniqueName: \"kubernetes.io/projected/f9cc61b3-cd89-4636-a3dd-60788041f808-kube-api-access-wntlg\") pod \"f9cc61b3-cd89-4636-a3dd-60788041f808\" (UID: \"f9cc61b3-cd89-4636-a3dd-60788041f808\") " Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.596697 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-bundle" (OuterVolumeSpecName: "bundle") pod "f9cc61b3-cd89-4636-a3dd-60788041f808" (UID: "f9cc61b3-cd89-4636-a3dd-60788041f808"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.598971 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9cc61b3-cd89-4636-a3dd-60788041f808-kube-api-access-wntlg" (OuterVolumeSpecName: "kube-api-access-wntlg") pod "f9cc61b3-cd89-4636-a3dd-60788041f808" (UID: "f9cc61b3-cd89-4636-a3dd-60788041f808"). InnerVolumeSpecName "kube-api-access-wntlg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.618697 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-util" (OuterVolumeSpecName: "util") pod "f9cc61b3-cd89-4636-a3dd-60788041f808" (UID: "f9cc61b3-cd89-4636-a3dd-60788041f808"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.695418 4919 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-util\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.695448 4919 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f9cc61b3-cd89-4636-a3dd-60788041f808-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:11 crc kubenswrapper[4919]: I0930 20:24:11.695462 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wntlg\" (UniqueName: \"kubernetes.io/projected/f9cc61b3-cd89-4636-a3dd-60788041f808-kube-api-access-wntlg\") on node \"crc\" DevicePath \"\"" Sep 30 20:24:12 crc kubenswrapper[4919]: I0930 20:24:12.239109 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" event={"ID":"f9cc61b3-cd89-4636-a3dd-60788041f808","Type":"ContainerDied","Data":"04894b8cda2a319030c41dc5b9ee1e1df9503766b25802d0c40117b3feffde52"} Sep 30 20:24:12 crc kubenswrapper[4919]: I0930 20:24:12.240205 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04894b8cda2a319030c41dc5b9ee1e1df9503766b25802d0c40117b3feffde52" Sep 30 20:24:12 crc kubenswrapper[4919]: I0930 20:24:12.239259 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.904718 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-tgthv"] Sep 30 20:24:18 crc kubenswrapper[4919]: E0930 20:24:18.905571 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerName="pull" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.905588 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerName="pull" Sep 30 20:24:18 crc kubenswrapper[4919]: E0930 20:24:18.905601 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerName="extract" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.905608 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerName="extract" Sep 30 20:24:18 crc kubenswrapper[4919]: E0930 20:24:18.905625 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerName="pull" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.905633 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerName="pull" Sep 30 20:24:18 crc kubenswrapper[4919]: E0930 20:24:18.905645 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerName="util" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.905651 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerName="util" Sep 30 20:24:18 crc kubenswrapper[4919]: E0930 20:24:18.905661 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerName="extract" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.905668 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerName="extract" Sep 30 20:24:18 crc kubenswrapper[4919]: E0930 20:24:18.905678 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerName="util" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.905684 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerName="util" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.905794 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a753e04-c280-40ea-bce3-2803f7a30e1d" containerName="extract" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.905805 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9cc61b3-cd89-4636-a3dd-60788041f808" containerName="extract" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.906274 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.908253 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"cluster-logging-operator-dockercfg-cvnhs" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.908785 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"kube-root-ca.crt" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.908957 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"openshift-service-ca.crt" Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.916699 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-tgthv"] Sep 30 20:24:18 crc kubenswrapper[4919]: I0930 20:24:18.985196 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bks9h\" (UniqueName: \"kubernetes.io/projected/83bfe237-5002-4f48-a10a-f6966ed9120c-kube-api-access-bks9h\") pod \"cluster-logging-operator-fcc886d58-tgthv\" (UID: \"83bfe237-5002-4f48-a10a-f6966ed9120c\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" Sep 30 20:24:19 crc kubenswrapper[4919]: I0930 20:24:19.087152 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bks9h\" (UniqueName: \"kubernetes.io/projected/83bfe237-5002-4f48-a10a-f6966ed9120c-kube-api-access-bks9h\") pod \"cluster-logging-operator-fcc886d58-tgthv\" (UID: \"83bfe237-5002-4f48-a10a-f6966ed9120c\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" Sep 30 20:24:19 crc kubenswrapper[4919]: I0930 20:24:19.108836 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bks9h\" (UniqueName: \"kubernetes.io/projected/83bfe237-5002-4f48-a10a-f6966ed9120c-kube-api-access-bks9h\") pod \"cluster-logging-operator-fcc886d58-tgthv\" (UID: \"83bfe237-5002-4f48-a10a-f6966ed9120c\") " pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" Sep 30 20:24:19 crc kubenswrapper[4919]: I0930 20:24:19.224171 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" Sep 30 20:24:19 crc kubenswrapper[4919]: I0930 20:24:19.452266 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-tgthv"] Sep 30 20:24:20 crc kubenswrapper[4919]: I0930 20:24:20.294168 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" event={"ID":"83bfe237-5002-4f48-a10a-f6966ed9120c","Type":"ContainerStarted","Data":"73b44d627b85c8553b033f6cba2b75bbb1441ff232179878513154bb1c98f061"} Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.332885 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" event={"ID":"83bfe237-5002-4f48-a10a-f6966ed9120c","Type":"ContainerStarted","Data":"09ea728e94087c7fd5b782d3a0aa4074cfbcd1c2680834703a49efb9fdc3cad3"} Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.346827 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" podStartSLOduration=1.999730858 podStartE2EDuration="8.346805758s" podCreationTimestamp="2025-09-30 20:24:18 +0000 UTC" firstStartedPulling="2025-09-30 20:24:19.460066676 +0000 UTC m=+644.576099793" lastFinishedPulling="2025-09-30 20:24:25.807141566 +0000 UTC m=+650.923174693" observedRunningTime="2025-09-30 20:24:26.344515442 +0000 UTC m=+651.460548569" watchObservedRunningTime="2025-09-30 20:24:26.346805758 +0000 UTC m=+651.462838895" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.700696 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf"] Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.701707 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.703945 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.704341 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.704517 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-9c5st" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.704931 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.706038 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.708189 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.720534 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf"] Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.832095 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-webhook-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.832143 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/bed3f41f-9f7d-4838-a3a2-3ed58371a416-manager-config\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.832175 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rkxj\" (UniqueName: \"kubernetes.io/projected/bed3f41f-9f7d-4838-a3a2-3ed58371a416-kube-api-access-9rkxj\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.832268 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-apiservice-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.832302 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.933185 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rkxj\" (UniqueName: \"kubernetes.io/projected/bed3f41f-9f7d-4838-a3a2-3ed58371a416-kube-api-access-9rkxj\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.933340 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-apiservice-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.933375 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.933417 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-webhook-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.933439 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/bed3f41f-9f7d-4838-a3a2-3ed58371a416-manager-config\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.934535 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/bed3f41f-9f7d-4838-a3a2-3ed58371a416-manager-config\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.939985 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.940644 4919 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-apiservice-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.942516 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bed3f41f-9f7d-4838-a3a2-3ed58371a416-webhook-cert\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:26 crc kubenswrapper[4919]: I0930 20:24:26.961185 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rkxj\" (UniqueName: \"kubernetes.io/projected/bed3f41f-9f7d-4838-a3a2-3ed58371a416-kube-api-access-9rkxj\") pod \"loki-operator-controller-manager-69dd967c6d-g6dkf\" (UID: \"bed3f41f-9f7d-4838-a3a2-3ed58371a416\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:27 crc kubenswrapper[4919]: I0930 20:24:27.015698 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:27 crc kubenswrapper[4919]: I0930 20:24:27.285776 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf"] Sep 30 20:24:27 crc kubenswrapper[4919]: W0930 20:24:27.296925 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbed3f41f_9f7d_4838_a3a2_3ed58371a416.slice/crio-3df13546e99c3d85ed4736ccce6a530781156db1094eb85701bb9d06fbf76761 WatchSource:0}: Error finding container 3df13546e99c3d85ed4736ccce6a530781156db1094eb85701bb9d06fbf76761: Status 404 returned error can't find the container with id 3df13546e99c3d85ed4736ccce6a530781156db1094eb85701bb9d06fbf76761 Sep 30 20:24:27 crc kubenswrapper[4919]: I0930 20:24:27.337374 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" event={"ID":"bed3f41f-9f7d-4838-a3a2-3ed58371a416","Type":"ContainerStarted","Data":"3df13546e99c3d85ed4736ccce6a530781156db1094eb85701bb9d06fbf76761"} Sep 30 20:24:30 crc kubenswrapper[4919]: I0930 20:24:30.352978 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" event={"ID":"bed3f41f-9f7d-4838-a3a2-3ed58371a416","Type":"ContainerStarted","Data":"f2c383953386032527c4b6cbea35f3bcc32a17542fff11133d3ee59d33e8037c"} Sep 30 20:24:37 crc kubenswrapper[4919]: I0930 20:24:37.394987 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" event={"ID":"bed3f41f-9f7d-4838-a3a2-3ed58371a416","Type":"ContainerStarted","Data":"04dde735911e4c24c1c6a121757a265aa4974f3e6d46cba23567e521fba2bee9"} Sep 30 20:24:37 crc kubenswrapper[4919]: I0930 20:24:37.396912 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:37 crc kubenswrapper[4919]: I0930 20:24:37.399429 4919 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" Sep 30 20:24:37 crc kubenswrapper[4919]: I0930 20:24:37.439599 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-69dd967c6d-g6dkf" podStartSLOduration=1.623616187 podStartE2EDuration="11.43958365s" podCreationTimestamp="2025-09-30 20:24:26 +0000 UTC" firstStartedPulling="2025-09-30 20:24:27.298567349 +0000 UTC m=+652.414600476" lastFinishedPulling="2025-09-30 20:24:37.114534802 +0000 UTC m=+662.230567939" observedRunningTime="2025-09-30 20:24:37.437981563 +0000 UTC m=+662.554014690" watchObservedRunningTime="2025-09-30 20:24:37.43958365 +0000 UTC m=+662.555616777" Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.793615 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.795182 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio" Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.798398 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.799674 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.804738 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.877089 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxvdb\" (UniqueName: \"kubernetes.io/projected/64fca293-3ca6-4148-a12c-6d25aed32013-kube-api-access-sxvdb\") pod \"minio\" (UID: \"64fca293-3ca6-4148-a12c-6d25aed32013\") " pod="minio-dev/minio" Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.877384 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e22fd1d9-c0b3-40d3-a481-8828922b1ea5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e22fd1d9-c0b3-40d3-a481-8828922b1ea5\") pod \"minio\" (UID: \"64fca293-3ca6-4148-a12c-6d25aed32013\") " pod="minio-dev/minio" Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.978916 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e22fd1d9-c0b3-40d3-a481-8828922b1ea5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e22fd1d9-c0b3-40d3-a481-8828922b1ea5\") pod \"minio\" (UID: \"64fca293-3ca6-4148-a12c-6d25aed32013\") " pod="minio-dev/minio" Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.979003 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxvdb\" (UniqueName: \"kubernetes.io/projected/64fca293-3ca6-4148-a12c-6d25aed32013-kube-api-access-sxvdb\") pod \"minio\" (UID: \"64fca293-3ca6-4148-a12c-6d25aed32013\") " pod="minio-dev/minio" Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.982074 4919 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 20:24:42 crc kubenswrapper[4919]: I0930 20:24:42.982113 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e22fd1d9-c0b3-40d3-a481-8828922b1ea5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e22fd1d9-c0b3-40d3-a481-8828922b1ea5\") pod \"minio\" (UID: \"64fca293-3ca6-4148-a12c-6d25aed32013\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/55ce972c471ac6aef82ec2cdaae86baca13d5ffe84e284744afdac8581dbec11/globalmount\"" pod="minio-dev/minio"
Sep 30 20:24:43 crc kubenswrapper[4919]: I0930 20:24:43.001908 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxvdb\" (UniqueName: \"kubernetes.io/projected/64fca293-3ca6-4148-a12c-6d25aed32013-kube-api-access-sxvdb\") pod \"minio\" (UID: \"64fca293-3ca6-4148-a12c-6d25aed32013\") " pod="minio-dev/minio"
Sep 30 20:24:43 crc kubenswrapper[4919]: I0930 20:24:43.012577 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e22fd1d9-c0b3-40d3-a481-8828922b1ea5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e22fd1d9-c0b3-40d3-a481-8828922b1ea5\") pod \"minio\" (UID: \"64fca293-3ca6-4148-a12c-6d25aed32013\") " pod="minio-dev/minio"
Sep 30 20:24:43 crc kubenswrapper[4919]: I0930 20:24:43.173325 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio"
Sep 30 20:24:43 crc kubenswrapper[4919]: I0930 20:24:43.425694 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"]
Sep 30 20:24:43 crc kubenswrapper[4919]: W0930 20:24:43.433801 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64fca293_3ca6_4148_a12c_6d25aed32013.slice/crio-11c3f3a7a789f52985dd20e3f93621dbd865ae8bc43655794b54eee825bd6bef WatchSource:0}: Error finding container 11c3f3a7a789f52985dd20e3f93621dbd865ae8bc43655794b54eee825bd6bef: Status 404 returned error can't find the container with id 11c3f3a7a789f52985dd20e3f93621dbd865ae8bc43655794b54eee825bd6bef
Sep 30 20:24:44 crc kubenswrapper[4919]: I0930 20:24:44.436763 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"64fca293-3ca6-4148-a12c-6d25aed32013","Type":"ContainerStarted","Data":"11c3f3a7a789f52985dd20e3f93621dbd865ae8bc43655794b54eee825bd6bef"}
Sep 30 20:24:48 crc kubenswrapper[4919]: I0930 20:24:48.476117 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"64fca293-3ca6-4148-a12c-6d25aed32013","Type":"ContainerStarted","Data":"ec3bc4a1c2f64920c3815d03b7df5b93516e4fc8f8865cffe7b6d86c4bc841d9"}
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.569572 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=12.653187394 podStartE2EDuration="16.569552106s" podCreationTimestamp="2025-09-30 20:24:39 +0000 UTC" firstStartedPulling="2025-09-30 20:24:43.436595624 +0000 UTC m=+668.552628761" lastFinishedPulling="2025-09-30 20:24:47.352960336 +0000 UTC m=+672.468993473" observedRunningTime="2025-09-30 20:24:48.509928497 +0000 UTC m=+673.625961624" watchObservedRunningTime="2025-09-30 20:24:55.569552106 +0000 UTC m=+680.685585243"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.573518 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-logging/collector-vcpp6"]
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.574501 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.577379 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-metrics"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.578986 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-syslog-receiver"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.579330 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-token"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.589339 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-logging"/"collector-dockercfg-gswp7"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.590865 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-config"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.591612 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-logging"/"collector-trustbundle"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.625151 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-vcpp6"]
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.755991 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw75l\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-kube-api-access-bw75l\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756036 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756062 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-entrypoint\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756088 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-trusted-ca\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756106 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-metrics\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756206 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/2edf0652-09cd-4eb0-915b-2fa6e0554a36-datadir\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756323 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-syslog-receiver\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756348 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-token\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756374 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config-openshift-service-cacrt\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756423 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-sa-token\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.756472 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/2edf0652-09cd-4eb0-915b-2fa6e0554a36-tmp\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857600 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw75l\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-kube-api-access-bw75l\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857638 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857675 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-entrypoint\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857697 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-trusted-ca\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857715 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-metrics\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857735 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/2edf0652-09cd-4eb0-915b-2fa6e0554a36-datadir\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857758 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-syslog-receiver\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857774 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-token\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857790 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config-openshift-service-cacrt\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857812 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-sa-token\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.857833 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/2edf0652-09cd-4eb0-915b-2fa6e0554a36-tmp\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.858032 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/2edf0652-09cd-4eb0-915b-2fa6e0554a36-datadir\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.858919 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.858945 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config-openshift-service-cacrt\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.859026 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-entrypoint\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.859386 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-trusted-ca\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.863412 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-token\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.864497 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-metrics\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.871564 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-syslog-receiver\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.873918 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/2edf0652-09cd-4eb0-915b-2fa6e0554a36-tmp\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.874574 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-sa-token\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.874716 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw75l\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-kube-api-access-bw75l\") pod \"collector-vcpp6\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:55 crc kubenswrapper[4919]: I0930 20:24:55.904399 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-vcpp6"
Sep 30 20:24:56 crc kubenswrapper[4919]: I0930 20:24:56.351338 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-logging/collector-vcpp6"]
Sep 30 20:24:56 crc kubenswrapper[4919]: I0930 20:24:56.527504 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-vcpp6" event={"ID":"2edf0652-09cd-4eb0-915b-2fa6e0554a36","Type":"ContainerStarted","Data":"e4c36dcd3f014345d52aa6557aa87f5429cf926192a04277cb93e0531e501c4e"}
Sep 30 20:25:03 crc kubenswrapper[4919]: I0930 20:25:03.574523 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-vcpp6" event={"ID":"2edf0652-09cd-4eb0-915b-2fa6e0554a36","Type":"ContainerStarted","Data":"875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c"}
Sep 30 20:25:03 crc kubenswrapper[4919]: I0930 20:25:03.611396 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-logging/collector-vcpp6" podStartSLOduration=2.221181905 podStartE2EDuration="8.611368995s" podCreationTimestamp="2025-09-30 20:24:55 +0000 UTC" firstStartedPulling="2025-09-30 20:24:56.360250676 +0000 UTC m=+681.476283843" lastFinishedPulling="2025-09-30 20:25:02.750437796 +0000 UTC m=+687.866470933" observedRunningTime="2025-09-30 20:25:03.602728533 +0000 UTC m=+688.718761690" watchObservedRunningTime="2025-09-30 20:25:03.611368995 +0000 UTC m=+688.727402152"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.672102 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"]
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.675625 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.683458 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"]
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.689690 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.768444 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.768508 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.768548 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljs5f\" (UniqueName: \"kubernetes.io/projected/1c0928aa-abb7-42d8-888e-6990cd01c99a-kube-api-access-ljs5f\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.869753 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.869821 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.869867 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljs5f\" (UniqueName: \"kubernetes.io/projected/1c0928aa-abb7-42d8-888e-6990cd01c99a-kube-api-access-ljs5f\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.870450 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.870515 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:13 crc kubenswrapper[4919]: I0930 20:25:13.893402 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljs5f\" (UniqueName: \"kubernetes.io/projected/1c0928aa-abb7-42d8-888e-6990cd01c99a-kube-api-access-ljs5f\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:14 crc kubenswrapper[4919]: I0930 20:25:14.010694 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:14 crc kubenswrapper[4919]: I0930 20:25:14.266339 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"]
Sep 30 20:25:14 crc kubenswrapper[4919]: W0930 20:25:14.276690 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c0928aa_abb7_42d8_888e_6990cd01c99a.slice/crio-1a800aac05fab049bd6fd35464eb67413d20e82df89aad73dd8497721f33de46 WatchSource:0}: Error finding container 1a800aac05fab049bd6fd35464eb67413d20e82df89aad73dd8497721f33de46: Status 404 returned error can't find the container with id 1a800aac05fab049bd6fd35464eb67413d20e82df89aad73dd8497721f33de46
Sep 30 20:25:14 crc kubenswrapper[4919]: I0930 20:25:14.676839 4919 generic.go:334] "Generic (PLEG): container finished" podID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerID="3c91e1b0e15b46bb8eb876fa7904d7a0f11889dea2624c9c24816955d1783f62" exitCode=0
Sep 30 20:25:14 crc kubenswrapper[4919]: I0930 20:25:14.676908 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5" event={"ID":"1c0928aa-abb7-42d8-888e-6990cd01c99a","Type":"ContainerDied","Data":"3c91e1b0e15b46bb8eb876fa7904d7a0f11889dea2624c9c24816955d1783f62"}
Sep 30 20:25:14 crc kubenswrapper[4919]: I0930 20:25:14.676967 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5" event={"ID":"1c0928aa-abb7-42d8-888e-6990cd01c99a","Type":"ContainerStarted","Data":"1a800aac05fab049bd6fd35464eb67413d20e82df89aad73dd8497721f33de46"}
Sep 30 20:25:17 crc kubenswrapper[4919]: I0930 20:25:17.700018 4919 generic.go:334] "Generic (PLEG): container finished" podID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerID="cc2d049feb1e7a368922f7337d5b4105c157e9e99bb404c3a816e6dbbcdd676d" exitCode=0
Sep 30 20:25:17 crc kubenswrapper[4919]: I0930 20:25:17.700126 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5" event={"ID":"1c0928aa-abb7-42d8-888e-6990cd01c99a","Type":"ContainerDied","Data":"cc2d049feb1e7a368922f7337d5b4105c157e9e99bb404c3a816e6dbbcdd676d"}
Sep 30 20:25:18 crc kubenswrapper[4919]: I0930 20:25:18.709897 4919 generic.go:334] "Generic (PLEG): container finished" podID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerID="13a96c6cea4496f5d5cc20dac3f42f440ec40ab55730ccb596c09b5420ff937f" exitCode=0
Sep 30 20:25:18 crc kubenswrapper[4919]: I0930 20:25:18.710066 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5" event={"ID":"1c0928aa-abb7-42d8-888e-6990cd01c99a","Type":"ContainerDied","Data":"13a96c6cea4496f5d5cc20dac3f42f440ec40ab55730ccb596c09b5420ff937f"}
Sep 30 20:25:19 crc kubenswrapper[4919]: I0930 20:25:19.996841 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.153093 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-util\") pod \"1c0928aa-abb7-42d8-888e-6990cd01c99a\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") "
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.153239 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljs5f\" (UniqueName: \"kubernetes.io/projected/1c0928aa-abb7-42d8-888e-6990cd01c99a-kube-api-access-ljs5f\") pod \"1c0928aa-abb7-42d8-888e-6990cd01c99a\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") "
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.153266 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-bundle\") pod \"1c0928aa-abb7-42d8-888e-6990cd01c99a\" (UID: \"1c0928aa-abb7-42d8-888e-6990cd01c99a\") "
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.154143 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-bundle" (OuterVolumeSpecName: "bundle") pod "1c0928aa-abb7-42d8-888e-6990cd01c99a" (UID: "1c0928aa-abb7-42d8-888e-6990cd01c99a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.158701 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c0928aa-abb7-42d8-888e-6990cd01c99a-kube-api-access-ljs5f" (OuterVolumeSpecName: "kube-api-access-ljs5f") pod "1c0928aa-abb7-42d8-888e-6990cd01c99a" (UID: "1c0928aa-abb7-42d8-888e-6990cd01c99a"). InnerVolumeSpecName "kube-api-access-ljs5f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.255135 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljs5f\" (UniqueName: \"kubernetes.io/projected/1c0928aa-abb7-42d8-888e-6990cd01c99a-kube-api-access-ljs5f\") on node \"crc\" DevicePath \"\""
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.255192 4919 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.310879 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-util" (OuterVolumeSpecName: "util") pod "1c0928aa-abb7-42d8-888e-6990cd01c99a" (UID: "1c0928aa-abb7-42d8-888e-6990cd01c99a"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.356244 4919 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0928aa-abb7-42d8-888e-6990cd01c99a-util\") on node \"crc\" DevicePath \"\""
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.741879 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5" event={"ID":"1c0928aa-abb7-42d8-888e-6990cd01c99a","Type":"ContainerDied","Data":"1a800aac05fab049bd6fd35464eb67413d20e82df89aad73dd8497721f33de46"}
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.741959 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a800aac05fab049bd6fd35464eb67413d20e82df89aad73dd8497721f33de46"
Sep 30 20:25:20 crc kubenswrapper[4919]: I0930 20:25:20.741997 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.871151 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm"]
Sep 30 20:25:21 crc kubenswrapper[4919]: E0930 20:25:21.871753 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerName="pull"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.871771 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerName="pull"
Sep 30 20:25:21 crc kubenswrapper[4919]: E0930 20:25:21.871790 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerName="util"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.871799 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerName="util"
Sep 30 20:25:21 crc kubenswrapper[4919]: E0930 20:25:21.871812 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerName="extract"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.871820 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerName="extract"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.871928 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c0928aa-abb7-42d8-888e-6990cd01c99a" containerName="extract"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.872317 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.877799 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-t6qn7"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.877973 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.878483 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnlln\" (UniqueName: \"kubernetes.io/projected/89dca143-9969-4919-9dc9-1eeb1d4614e9-kube-api-access-fnlln\") pod \"nmstate-operator-5d6f6cfd66-6l8tm\" (UID: \"89dca143-9969-4919-9dc9-1eeb1d4614e9\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.878510 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.885765 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm"]
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.979746 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnlln\" (UniqueName: \"kubernetes.io/projected/89dca143-9969-4919-9dc9-1eeb1d4614e9-kube-api-access-fnlln\") pod \"nmstate-operator-5d6f6cfd66-6l8tm\" (UID: \"89dca143-9969-4919-9dc9-1eeb1d4614e9\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm"
Sep 30 20:25:21 crc kubenswrapper[4919]: I0930 20:25:21.995418 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnlln\" (UniqueName: \"kubernetes.io/projected/89dca143-9969-4919-9dc9-1eeb1d4614e9-kube-api-access-fnlln\") pod \"nmstate-operator-5d6f6cfd66-6l8tm\" (UID: \"89dca143-9969-4919-9dc9-1eeb1d4614e9\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm"
Sep 30 20:25:22 crc kubenswrapper[4919]: I0930 20:25:22.185834 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm"
Sep 30 20:25:22 crc kubenswrapper[4919]: I0930 20:25:22.441052 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm"]
Sep 30 20:25:22 crc kubenswrapper[4919]: I0930 20:25:22.756710 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm" event={"ID":"89dca143-9969-4919-9dc9-1eeb1d4614e9","Type":"ContainerStarted","Data":"c66bde5eb0014d52b2ff8b7186666f75898a5d95fb407bb0601e4721bdfa8637"}
Sep 30 20:25:26 crc kubenswrapper[4919]: I0930 20:25:26.061824 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:25:26 crc kubenswrapper[4919]: I0930 20:25:26.062726 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:25:26 crc kubenswrapper[4919]: I0930 20:25:26.787817 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm" event={"ID":"89dca143-9969-4919-9dc9-1eeb1d4614e9","Type":"ContainerStarted","Data":"7b3daeee1c4c647f352f2c32c5a2a05e4a9a790da7136e2cccfb78be532949d3"}
Sep 30 20:25:26 crc kubenswrapper[4919]: I0930 20:25:26.809735 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-6l8tm" podStartSLOduration=2.641596056 podStartE2EDuration="5.809705919s" podCreationTimestamp="2025-09-30 20:25:21 +0000 UTC" firstStartedPulling="2025-09-30 20:25:22.454438365 +0000 UTC m=+707.570471492" lastFinishedPulling="2025-09-30 20:25:25.622548228 +0000 UTC m=+710.738581355" observedRunningTime="2025-09-30 20:25:26.806596858 +0000 UTC m=+711.922630025" watchObservedRunningTime="2025-09-30 20:25:26.809705919 +0000 UTC m=+711.925739086"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.768896 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd"]
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.771120 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.773428 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-rkmgm"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.789821 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd"]
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.815185 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-kdt4l"]
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.816334 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.827636 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"]
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.828784 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.830680 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.838525 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"]
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.877168 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlmx5\" (UniqueName: \"kubernetes.io/projected/503e0849-5f67-41ad-b1d8-3ebd8c23cc09-kube-api-access-zlmx5\") pod \"nmstate-metrics-58fcddf996-jvwwd\" (UID: \"503e0849-5f67-41ad-b1d8-3ebd8c23cc09\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.918525 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"]
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.919166 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.920854 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.921147 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.921267 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-hcnvw"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.930693 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"]
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.978661 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-dbus-socket\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.978709 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-ovs-socket\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.978775 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a3942c38-5d58-41de-9bdb-afd674081e1e-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-7kc5q\" (UID: \"a3942c38-5d58-41de-9bdb-afd674081e1e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.978802 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htjck\" (UniqueName: \"kubernetes.io/projected/b6a02ba1-cd2d-408a-8037-2f277448c7cf-kube-api-access-htjck\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.978837 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-nmstate-lock\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.978996 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hrgd\" (UniqueName: \"kubernetes.io/projected/a3942c38-5d58-41de-9bdb-afd674081e1e-kube-api-access-4hrgd\") pod \"nmstate-webhook-6d689559c5-7kc5q\" (UID: \"a3942c38-5d58-41de-9bdb-afd674081e1e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:27 crc kubenswrapper[4919]: I0930 20:25:27.979055 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlmx5\" (UniqueName: \"kubernetes.io/projected/503e0849-5f67-41ad-b1d8-3ebd8c23cc09-kube-api-access-zlmx5\") pod \"nmstate-metrics-58fcddf996-jvwwd\" (UID: \"503e0849-5f67-41ad-b1d8-3ebd8c23cc09\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.003659 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlmx5\" (UniqueName: \"kubernetes.io/projected/503e0849-5f67-41ad-b1d8-3ebd8c23cc09-kube-api-access-zlmx5\") pod \"nmstate-metrics-58fcddf996-jvwwd\" (UID: \"503e0849-5f67-41ad-b1d8-3ebd8c23cc09\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080408 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpnkd\" (UniqueName: \"kubernetes.io/projected/097f3e59-c8c4-4f4e-9d97-e6d402584649-kube-api-access-vpnkd\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080468 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-dbus-socket\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080485 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-ovs-socket\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080525 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a3942c38-5d58-41de-9bdb-afd674081e1e-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-7kc5q\" (UID: \"a3942c38-5d58-41de-9bdb-afd674081e1e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080544 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htjck\" (UniqueName: \"kubernetes.io/projected/b6a02ba1-cd2d-408a-8037-2f277448c7cf-kube-api-access-htjck\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080564 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/097f3e59-c8c4-4f4e-9d97-e6d402584649-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080580 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-nmstate-lock\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080617 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hrgd\" (UniqueName: \"kubernetes.io/projected/a3942c38-5d58-41de-9bdb-afd674081e1e-kube-api-access-4hrgd\") pod \"nmstate-webhook-6d689559c5-7kc5q\" (UID: \"a3942c38-5d58-41de-9bdb-afd674081e1e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080643 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/097f3e59-c8c4-4f4e-9d97-e6d402584649-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080966 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-dbus-socket\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080969 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-nmstate-lock\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.080974 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/b6a02ba1-cd2d-408a-8037-2f277448c7cf-ovs-socket\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.084398 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/a3942c38-5d58-41de-9bdb-afd674081e1e-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-7kc5q\" (UID: \"a3942c38-5d58-41de-9bdb-afd674081e1e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.092278 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.095798 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htjck\" (UniqueName: \"kubernetes.io/projected/b6a02ba1-cd2d-408a-8037-2f277448c7cf-kube-api-access-htjck\") pod \"nmstate-handler-kdt4l\" (UID: \"b6a02ba1-cd2d-408a-8037-2f277448c7cf\") " pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.097113 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hrgd\" (UniqueName: \"kubernetes.io/projected/a3942c38-5d58-41de-9bdb-afd674081e1e-kube-api-access-4hrgd\") pod \"nmstate-webhook-6d689559c5-7kc5q\" (UID: \"a3942c38-5d58-41de-9bdb-afd674081e1e\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.123076 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-69c56c6c65-5dw25"]
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.124010 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.128227 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-69c56c6c65-5dw25"]
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.137314 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.148889 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:28 crc kubenswrapper[4919]: W0930 20:25:28.159618 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6a02ba1_cd2d_408a_8037_2f277448c7cf.slice/crio-7f3d735462efa99b83bdb6c17e3c0177f1c4e57b064691cafd75f935251aebbd WatchSource:0}: Error finding container 7f3d735462efa99b83bdb6c17e3c0177f1c4e57b064691cafd75f935251aebbd: Status 404 returned error can't find the container with id 7f3d735462efa99b83bdb6c17e3c0177f1c4e57b064691cafd75f935251aebbd
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.182471 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/097f3e59-c8c4-4f4e-9d97-e6d402584649-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.182678 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/097f3e59-c8c4-4f4e-9d97-e6d402584649-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: E0930 20:25:28.182703 4919 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found
Sep 30 20:25:28 crc kubenswrapper[4919]: E0930 20:25:28.182770 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/097f3e59-c8c4-4f4e-9d97-e6d402584649-plugin-serving-cert podName:097f3e59-c8c4-4f4e-9d97-e6d402584649 nodeName:}" failed. No retries permitted until 2025-09-30 20:25:28.682749008 +0000 UTC m=+713.798782215 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/097f3e59-c8c4-4f4e-9d97-e6d402584649-plugin-serving-cert") pod "nmstate-console-plugin-864bb6dfb5-4mdbj" (UID: "097f3e59-c8c4-4f4e-9d97-e6d402584649") : secret "plugin-serving-cert" not found
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.182715 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpnkd\" (UniqueName: \"kubernetes.io/projected/097f3e59-c8c4-4f4e-9d97-e6d402584649-kube-api-access-vpnkd\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.183579 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/097f3e59-c8c4-4f4e-9d97-e6d402584649-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.199874 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpnkd\" (UniqueName: \"kubernetes.io/projected/097f3e59-c8c4-4f4e-9d97-e6d402584649-kube-api-access-vpnkd\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.284076 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-service-ca\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.284127 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-config\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.284149 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-serving-cert\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.284170 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-oauth-serving-cert\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.284193 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-trusted-ca-bundle\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.284211 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7drh\" (UniqueName: \"kubernetes.io/projected/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-kube-api-access-q7drh\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.286797 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-oauth-config\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.377931 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"]
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.388306 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-serving-cert\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.388380 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-oauth-serving-cert\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.388433 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-trusted-ca-bundle\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.388480 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7drh\" (UniqueName: \"kubernetes.io/projected/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-kube-api-access-q7drh\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.388570 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-oauth-config\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.388680 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-service-ca\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.388731 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-config\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.389452 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-oauth-serving-cert\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.389584 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-trusted-ca-bundle\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.390161 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-config\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.391537 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-service-ca\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.394760 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-oauth-config\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.395846 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-console-serving-cert\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.411376 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7drh\" (UniqueName: \"kubernetes.io/projected/e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d-kube-api-access-q7drh\") pod \"console-69c56c6c65-5dw25\" (UID: \"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d\") " pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.501397 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.535072 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd"]
Sep 30 20:25:28 crc kubenswrapper[4919]: W0930 20:25:28.539148 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod503e0849_5f67_41ad_b1d8_3ebd8c23cc09.slice/crio-baaa52d80bce2feae739c814e537e60e136a5f261f2c3867ac2d60a4e157c68e WatchSource:0}: Error finding container baaa52d80bce2feae739c814e537e60e136a5f261f2c3867ac2d60a4e157c68e: Status 404 returned error can't find the container with id baaa52d80bce2feae739c814e537e60e136a5f261f2c3867ac2d60a4e157c68e
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.692300 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/097f3e59-c8c4-4f4e-9d97-e6d402584649-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.699011 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-69c56c6c65-5dw25"]
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.700890 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/097f3e59-c8c4-4f4e-9d97-e6d402584649-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-4mdbj\" (UID: \"097f3e59-c8c4-4f4e-9d97-e6d402584649\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.800157 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-69c56c6c65-5dw25" event={"ID":"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d","Type":"ContainerStarted","Data":"f81b54ea4d1caf78d1514ddc187777d5923e7c36ef3d641660b01430d94ac4d0"}
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.801648 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q" event={"ID":"a3942c38-5d58-41de-9bdb-afd674081e1e","Type":"ContainerStarted","Data":"31f122cab2875d569828899738edf224c9912de0459aac28d8a6250af6570d0a"}
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.803826 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-kdt4l" event={"ID":"b6a02ba1-cd2d-408a-8037-2f277448c7cf","Type":"ContainerStarted","Data":"7f3d735462efa99b83bdb6c17e3c0177f1c4e57b064691cafd75f935251aebbd"}
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.804855 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd" event={"ID":"503e0849-5f67-41ad-b1d8-3ebd8c23cc09","Type":"ContainerStarted","Data":"baaa52d80bce2feae739c814e537e60e136a5f261f2c3867ac2d60a4e157c68e"}
Sep 30 20:25:28 crc kubenswrapper[4919]: I0930 20:25:28.832953 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"
Sep 30 20:25:29 crc kubenswrapper[4919]: I0930 20:25:29.089376 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj"]
Sep 30 20:25:29 crc kubenswrapper[4919]: I0930 20:25:29.816349 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-69c56c6c65-5dw25" event={"ID":"e4b0a44a-dbb4-4185-a7ea-84e6ce972d8d","Type":"ContainerStarted","Data":"182911e64c782a4064b3d8840bbccc080fb5c7cd36f4b989121519893ee0cdf5"}
Sep 30 20:25:29 crc kubenswrapper[4919]: I0930 20:25:29.818348 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj" event={"ID":"097f3e59-c8c4-4f4e-9d97-e6d402584649","Type":"ContainerStarted","Data":"d6aa026bcdcd84bbc9309b552acb53ad7a96efeedb61e51c4183e3074a69302c"}
Sep 30 20:25:29 crc kubenswrapper[4919]: I0930 20:25:29.841136 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-69c56c6c65-5dw25" podStartSLOduration=1.841095643 podStartE2EDuration="1.841095643s" podCreationTimestamp="2025-09-30 20:25:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:25:29.839384363 +0000 UTC m=+714.955417530" watchObservedRunningTime="2025-09-30 20:25:29.841095643 +0000 UTC m=+714.957128810"
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.832432 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q" event={"ID":"a3942c38-5d58-41de-9bdb-afd674081e1e","Type":"ContainerStarted","Data":"93d892b8a952114f450c991c52d69b9720e55bc95a557d63b6a6ff0f04f275fd"}
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.833128 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q"
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.836389 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj" event={"ID":"097f3e59-c8c4-4f4e-9d97-e6d402584649","Type":"ContainerStarted","Data":"254170c1bfd9148961423334b898afcfb000353e37025415463be95a584f377a"}
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.838306 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-kdt4l" event={"ID":"b6a02ba1-cd2d-408a-8037-2f277448c7cf","Type":"ContainerStarted","Data":"6f3aa769ca8ae3b7fb3d22878d57e180841caa274b26553e2dab526d09a2bfa5"}
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.838473 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.839738 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd" event={"ID":"503e0849-5f67-41ad-b1d8-3ebd8c23cc09","Type":"ContainerStarted","Data":"3b2fe147ee6f03d3b2809397d67559f8a852f37b0498b3b85f74d67d368ad757"}
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.852585 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q" podStartSLOduration=1.6919538250000001 podStartE2EDuration="4.852567506s" podCreationTimestamp="2025-09-30 20:25:27 +0000 UTC" firstStartedPulling="2025-09-30 20:25:28.403543745 +0000 UTC m=+713.519576872" lastFinishedPulling="2025-09-30 20:25:31.564157416 +0000 UTC m=+716.680190553" observedRunningTime="2025-09-30 20:25:31.850829505 +0000 UTC m=+716.966862652" watchObservedRunningTime="2025-09-30 20:25:31.852567506 +0000 UTC m=+716.968600633"
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.866937 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-4mdbj" podStartSLOduration=2.413199655 podStartE2EDuration="4.866913262s" podCreationTimestamp="2025-09-30 20:25:27 +0000 UTC" firstStartedPulling="2025-09-30 20:25:29.097430612 +0000 UTC m=+714.213463739" lastFinishedPulling="2025-09-30 20:25:31.551144219 +0000 UTC m=+716.667177346" observedRunningTime="2025-09-30 20:25:31.866473469 +0000 UTC m=+716.982506596" watchObservedRunningTime="2025-09-30 20:25:31.866913262 +0000 UTC m=+716.982946399"
Sep 30 20:25:31 crc kubenswrapper[4919]: I0930 20:25:31.884024 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-kdt4l" podStartSLOduration=1.499443649 podStartE2EDuration="4.883986568s" podCreationTimestamp="2025-09-30 20:25:27 +0000 UTC" firstStartedPulling="2025-09-30 20:25:28.167974369 +0000 UTC m=+713.284007496" lastFinishedPulling="2025-09-30 20:25:31.552517288 +0000 UTC m=+716.668550415" observedRunningTime="2025-09-30 20:25:31.880923719 +0000 UTC m=+716.996956856" watchObservedRunningTime="2025-09-30 20:25:31.883986568 +0000 UTC m=+717.000019695"
Sep 30 20:25:37 crc kubenswrapper[4919]: I0930 20:25:37.902673 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd" event={"ID":"503e0849-5f67-41ad-b1d8-3ebd8c23cc09","Type":"ContainerStarted","Data":"ee2b40fd4db279763ab3edd3e23c9779ad7a4a375f361c6f9596a39b8db36982"}
Sep 30 20:25:37 crc kubenswrapper[4919]: I0930 20:25:37.928082 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-jvwwd" podStartSLOduration=2.213638575 podStartE2EDuration="10.928034845s" podCreationTimestamp="2025-09-30 20:25:27 +0000 UTC" firstStartedPulling="2025-09-30 20:25:28.541072207 +0000 UTC m=+713.657105354" lastFinishedPulling="2025-09-30 20:25:37.255468497 +0000 UTC m=+722.371501624" observedRunningTime="2025-09-30 20:25:37.924130112 +0000 UTC m=+723.040163279" watchObservedRunningTime="2025-09-30 20:25:37.928034845 +0000 UTC m=+723.044067972"
Sep 30 20:25:38 crc kubenswrapper[4919]: I0930 20:25:38.165625 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-kdt4l"
Sep 30 20:25:38 crc kubenswrapper[4919]: I0930 20:25:38.502058 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:38 crc kubenswrapper[4919]: I0930 20:25:38.502196 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:38 crc kubenswrapper[4919]: I0930 20:25:38.509393 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:38 crc kubenswrapper[4919]: I0930 20:25:38.925380 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-69c56c6c65-5dw25"
Sep 30 20:25:39 crc kubenswrapper[4919]: I0930 20:25:39.001533 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-console/console-f9d7485db-qxlpx"] Sep 30 20:25:48 crc kubenswrapper[4919]: I0930 20:25:48.158728 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-7kc5q" Sep 30 20:25:56 crc kubenswrapper[4919]: I0930 20:25:56.061950 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:25:56 crc kubenswrapper[4919]: I0930 20:25:56.062568 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.157144 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr"] Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.159115 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.161517 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.175767 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr"] Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.260396 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-m9k8f"] Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.260587 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" podUID="4599a59e-3533-494d-b149-f84b3033c62c" containerName="controller-manager" containerID="cri-o://afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af" gracePeriod=30 Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.320450 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmqhb\" (UniqueName: \"kubernetes.io/projected/ef6fff6c-fe79-4db3-a127-8d9938489f52-kube-api-access-fmqhb\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.320505 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.320526 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" 
(UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.326753 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"] Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.326942 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" podUID="dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" containerName="route-controller-manager" containerID="cri-o://4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e" gracePeriod=30 Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.421231 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.421273 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.421352 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmqhb\" (UniqueName: \"kubernetes.io/projected/ef6fff6c-fe79-4db3-a127-8d9938489f52-kube-api-access-fmqhb\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.422197 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.422346 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.439881 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmqhb\" (UniqueName: \"kubernetes.io/projected/ef6fff6c-fe79-4db3-a127-8d9938489f52-kube-api-access-fmqhb\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr\" (UID: 
\"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.473887 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.623894 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.723728 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4599a59e-3533-494d-b149-f84b3033c62c-serving-cert\") pod \"4599a59e-3533-494d-b149-f84b3033c62c\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.723807 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-proxy-ca-bundles\") pod \"4599a59e-3533-494d-b149-f84b3033c62c\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.723866 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-client-ca\") pod \"4599a59e-3533-494d-b149-f84b3033c62c\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.723896 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-config\") pod \"4599a59e-3533-494d-b149-f84b3033c62c\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.723918 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx52g\" (UniqueName: \"kubernetes.io/projected/4599a59e-3533-494d-b149-f84b3033c62c-kube-api-access-xx52g\") pod \"4599a59e-3533-494d-b149-f84b3033c62c\" (UID: \"4599a59e-3533-494d-b149-f84b3033c62c\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.724735 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-client-ca" (OuterVolumeSpecName: "client-ca") pod "4599a59e-3533-494d-b149-f84b3033c62c" (UID: "4599a59e-3533-494d-b149-f84b3033c62c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.724848 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-config" (OuterVolumeSpecName: "config") pod "4599a59e-3533-494d-b149-f84b3033c62c" (UID: "4599a59e-3533-494d-b149-f84b3033c62c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.725040 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4599a59e-3533-494d-b149-f84b3033c62c" (UID: "4599a59e-3533-494d-b149-f84b3033c62c"). 
InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.726273 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.728654 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4599a59e-3533-494d-b149-f84b3033c62c-kube-api-access-xx52g" (OuterVolumeSpecName: "kube-api-access-xx52g") pod "4599a59e-3533-494d-b149-f84b3033c62c" (UID: "4599a59e-3533-494d-b149-f84b3033c62c"). InnerVolumeSpecName "kube-api-access-xx52g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.729346 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4599a59e-3533-494d-b149-f84b3033c62c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4599a59e-3533-494d-b149-f84b3033c62c" (UID: "4599a59e-3533-494d-b149-f84b3033c62c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824414 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-config\") pod \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824485 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-serving-cert\") pod \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824543 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-client-ca\") pod \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824567 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2nv5\" (UniqueName: \"kubernetes.io/projected/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-kube-api-access-v2nv5\") pod \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\" (UID: \"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3\") " Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824779 4919 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824793 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824803 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx52g\" (UniqueName: \"kubernetes.io/projected/4599a59e-3533-494d-b149-f84b3033c62c-kube-api-access-xx52g\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824812 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/4599a59e-3533-494d-b149-f84b3033c62c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.824820 4919 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4599a59e-3533-494d-b149-f84b3033c62c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.825721 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-client-ca" (OuterVolumeSpecName: "client-ca") pod "dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" (UID: "dc9cfa84-ad26-4f6a-aaea-75ee71060ff3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.825839 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-config" (OuterVolumeSpecName: "config") pod "dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" (UID: "dc9cfa84-ad26-4f6a-aaea-75ee71060ff3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.828478 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-kube-api-access-v2nv5" (OuterVolumeSpecName: "kube-api-access-v2nv5") pod "dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" (UID: "dc9cfa84-ad26-4f6a-aaea-75ee71060ff3"). InnerVolumeSpecName "kube-api-access-v2nv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.828473 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" (UID: "dc9cfa84-ad26-4f6a-aaea-75ee71060ff3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.926815 4919 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.926873 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2nv5\" (UniqueName: \"kubernetes.io/projected/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-kube-api-access-v2nv5\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.926903 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.926930 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:03 crc kubenswrapper[4919]: I0930 20:26:03.927778 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr"] Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.074462 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-qxlpx" podUID="2d053914-edeb-49d0-bffa-b6d63885a5fb" containerName="console" containerID="cri-o://673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6" gracePeriod=15 Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.112648 4919 generic.go:334] "Generic (PLEG): container finished" podID="4599a59e-3533-494d-b149-f84b3033c62c" containerID="afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af" exitCode=0 Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.112708 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.112733 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" event={"ID":"4599a59e-3533-494d-b149-f84b3033c62c","Type":"ContainerDied","Data":"afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af"} Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.112777 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-m9k8f" event={"ID":"4599a59e-3533-494d-b149-f84b3033c62c","Type":"ContainerDied","Data":"00258f713d6316dad18b7350af5745b060a79d9b81c63f8ac8df90ea645db473"} Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.112799 4919 scope.go:117] "RemoveContainer" containerID="afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.119560 4919 generic.go:334] "Generic (PLEG): container finished" podID="dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" containerID="4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e" exitCode=0 Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.119661 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" event={"ID":"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3","Type":"ContainerDied","Data":"4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e"} Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.119732 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" event={"ID":"dc9cfa84-ad26-4f6a-aaea-75ee71060ff3","Type":"ContainerDied","Data":"c8c456c16629efaaaf1c8ebbf7b2f9bad6d8d99044f8811c787d5b2123e17008"} Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.119814 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.122874 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" event={"ID":"ef6fff6c-fe79-4db3-a127-8d9938489f52","Type":"ContainerStarted","Data":"a424a961927b2c073d02b628c4e4c61c574f8c85722bf7dfa28a18aa7460d185"} Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.122930 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" event={"ID":"ef6fff6c-fe79-4db3-a127-8d9938489f52","Type":"ContainerStarted","Data":"69f77004afd6ea9fe5caff8ce96b03bdd12781d3514c0e5f317b8d410a4cce47"} Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.162442 4919 scope.go:117] "RemoveContainer" containerID="afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af" Sep 30 20:26:04 crc kubenswrapper[4919]: E0930 20:26:04.164466 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af\": container with ID starting with afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af not found: ID does not exist" containerID="afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.164546 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af"} err="failed to get container status \"afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af\": rpc error: code = NotFound desc = could not find container \"afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af\": container with ID starting with afd99edaa3e2fc409eb31ff90c036262d500ea94091d06f8f6f497d6c29066af not found: ID does not exist" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.164592 4919 scope.go:117] "RemoveContainer" containerID="4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.167513 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"] Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.174321 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-8brdq"] Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.181753 4919 scope.go:117] "RemoveContainer" containerID="4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e" Sep 30 20:26:04 crc kubenswrapper[4919]: E0930 20:26:04.182354 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e\": container with ID starting with 4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e not found: ID does not exist" containerID="4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.182387 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e"} err="failed to get 
container status \"4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e\": rpc error: code = NotFound desc = could not find container \"4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e\": container with ID starting with 4641cf1803f93fec7824b0a46d9eac0d74f8676744ce0162872966815e8b731e not found: ID does not exist" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.192151 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-m9k8f"] Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.196716 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-m9k8f"] Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.490601 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-qxlpx_2d053914-edeb-49d0-bffa-b6d63885a5fb/console/0.log" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.490664 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.634624 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-serving-cert\") pod \"2d053914-edeb-49d0-bffa-b6d63885a5fb\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.634746 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-oauth-serving-cert\") pod \"2d053914-edeb-49d0-bffa-b6d63885a5fb\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.634808 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-service-ca\") pod \"2d053914-edeb-49d0-bffa-b6d63885a5fb\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.634837 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-config\") pod \"2d053914-edeb-49d0-bffa-b6d63885a5fb\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.634878 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-trusted-ca-bundle\") pod \"2d053914-edeb-49d0-bffa-b6d63885a5fb\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.634906 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-oauth-config\") pod \"2d053914-edeb-49d0-bffa-b6d63885a5fb\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.634934 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vghd7\" (UniqueName: 
\"kubernetes.io/projected/2d053914-edeb-49d0-bffa-b6d63885a5fb-kube-api-access-vghd7\") pod \"2d053914-edeb-49d0-bffa-b6d63885a5fb\" (UID: \"2d053914-edeb-49d0-bffa-b6d63885a5fb\") " Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.635618 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "2d053914-edeb-49d0-bffa-b6d63885a5fb" (UID: "2d053914-edeb-49d0-bffa-b6d63885a5fb"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.635633 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "2d053914-edeb-49d0-bffa-b6d63885a5fb" (UID: "2d053914-edeb-49d0-bffa-b6d63885a5fb"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.635684 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-config" (OuterVolumeSpecName: "console-config") pod "2d053914-edeb-49d0-bffa-b6d63885a5fb" (UID: "2d053914-edeb-49d0-bffa-b6d63885a5fb"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.635896 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-service-ca" (OuterVolumeSpecName: "service-ca") pod "2d053914-edeb-49d0-bffa-b6d63885a5fb" (UID: "2d053914-edeb-49d0-bffa-b6d63885a5fb"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.651454 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "2d053914-edeb-49d0-bffa-b6d63885a5fb" (UID: "2d053914-edeb-49d0-bffa-b6d63885a5fb"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.654691 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "2d053914-edeb-49d0-bffa-b6d63885a5fb" (UID: "2d053914-edeb-49d0-bffa-b6d63885a5fb"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.657887 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d053914-edeb-49d0-bffa-b6d63885a5fb-kube-api-access-vghd7" (OuterVolumeSpecName: "kube-api-access-vghd7") pod "2d053914-edeb-49d0-bffa-b6d63885a5fb" (UID: "2d053914-edeb-49d0-bffa-b6d63885a5fb"). InnerVolumeSpecName "kube-api-access-vghd7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.736151 4919 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.736258 4919 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-service-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.736299 4919 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.736313 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2d053914-edeb-49d0-bffa-b6d63885a5fb-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.736322 4919 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.736333 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vghd7\" (UniqueName: \"kubernetes.io/projected/2d053914-edeb-49d0-bffa-b6d63885a5fb-kube-api-access-vghd7\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.736342 4919 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2d053914-edeb-49d0-bffa-b6d63885a5fb-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.854983 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t"] Sep 30 20:26:04 crc kubenswrapper[4919]: E0930 20:26:04.855238 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4599a59e-3533-494d-b149-f84b3033c62c" containerName="controller-manager" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.855252 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="4599a59e-3533-494d-b149-f84b3033c62c" containerName="controller-manager" Sep 30 20:26:04 crc kubenswrapper[4919]: E0930 20:26:04.855268 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" containerName="route-controller-manager" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.855274 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" containerName="route-controller-manager" Sep 30 20:26:04 crc kubenswrapper[4919]: E0930 20:26:04.855283 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d053914-edeb-49d0-bffa-b6d63885a5fb" containerName="console" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.855288 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d053914-edeb-49d0-bffa-b6d63885a5fb" containerName="console" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.855383 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" 
containerName="route-controller-manager" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.855394 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="4599a59e-3533-494d-b149-f84b3033c62c" containerName="controller-manager" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.855408 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d053914-edeb-49d0-bffa-b6d63885a5fb" containerName="console" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.855788 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.857891 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.858897 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-865664d8fd-zftxr"] Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.859665 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.861410 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.861684 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.863140 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.865535 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.865919 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.865501 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.866782 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.866989 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.867094 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.874691 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.886915 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.892774 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 
20:26:04.895792 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t"] Sep 30 20:26:04 crc kubenswrapper[4919]: I0930 20:26:04.899751 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-865664d8fd-zftxr"] Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.039847 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18674508-e102-4f46-988d-743c561690e6-serving-cert\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.040344 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7gh8\" (UniqueName: \"kubernetes.io/projected/18674508-e102-4f46-988d-743c561690e6-kube-api-access-b7gh8\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.040397 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gvrh\" (UniqueName: \"kubernetes.io/projected/50557193-910d-4913-b3b0-0328c53dd3d4-kube-api-access-5gvrh\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.040434 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-client-ca\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.040490 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-config\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.040598 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50557193-910d-4913-b3b0-0328c53dd3d4-serving-cert\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.040648 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-config\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.040680 4919 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-client-ca\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.040843 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-proxy-ca-bundles\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.052182 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-865664d8fd-zftxr"] Sep 30 20:26:05 crc kubenswrapper[4919]: E0930 20:26:05.052647 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[client-ca config kube-api-access-b7gh8 proxy-ca-bundles serving-cert], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" podUID="18674508-e102-4f46-988d-743c561690e6" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.077956 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t"] Sep 30 20:26:05 crc kubenswrapper[4919]: E0930 20:26:05.078480 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[client-ca config kube-api-access-5gvrh serving-cert], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" podUID="50557193-910d-4913-b3b0-0328c53dd3d4" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.130708 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-qxlpx_2d053914-edeb-49d0-bffa-b6d63885a5fb/console/0.log" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.130748 4919 generic.go:334] "Generic (PLEG): container finished" podID="2d053914-edeb-49d0-bffa-b6d63885a5fb" containerID="673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6" exitCode=2 Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.130788 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-qxlpx" event={"ID":"2d053914-edeb-49d0-bffa-b6d63885a5fb","Type":"ContainerDied","Data":"673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6"} Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.130809 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-qxlpx" event={"ID":"2d053914-edeb-49d0-bffa-b6d63885a5fb","Type":"ContainerDied","Data":"f96619556c058ee8d3dad9046a7c95e1738ff4d9d46c9a0fed8f869285065430"} Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.130825 4919 scope.go:117] "RemoveContainer" containerID="673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.130919 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-qxlpx" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141740 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50557193-910d-4913-b3b0-0328c53dd3d4-serving-cert\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141794 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-config\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141813 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-client-ca\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141848 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-proxy-ca-bundles\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141878 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18674508-e102-4f46-988d-743c561690e6-serving-cert\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141895 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7gh8\" (UniqueName: \"kubernetes.io/projected/18674508-e102-4f46-988d-743c561690e6-kube-api-access-b7gh8\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141927 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gvrh\" (UniqueName: \"kubernetes.io/projected/50557193-910d-4913-b3b0-0328c53dd3d4-kube-api-access-5gvrh\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141959 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-client-ca\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.141993 4919 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-config\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.143100 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-config\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.143805 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-config\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.143990 4919 generic.go:334] "Generic (PLEG): container finished" podID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerID="a424a961927b2c073d02b628c4e4c61c574f8c85722bf7dfa28a18aa7460d185" exitCode=0 Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.144049 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.144137 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-client-ca\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.144308 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" event={"ID":"ef6fff6c-fe79-4db3-a127-8d9938489f52","Type":"ContainerDied","Data":"a424a961927b2c073d02b628c4e4c61c574f8c85722bf7dfa28a18aa7460d185"} Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.144546 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.145329 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-proxy-ca-bundles\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.145350 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-client-ca\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.147634 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50557193-910d-4913-b3b0-0328c53dd3d4-serving-cert\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.149609 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18674508-e102-4f46-988d-743c561690e6-serving-cert\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.163423 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7gh8\" (UniqueName: \"kubernetes.io/projected/18674508-e102-4f46-988d-743c561690e6-kube-api-access-b7gh8\") pod \"controller-manager-865664d8fd-zftxr\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.163992 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gvrh\" (UniqueName: \"kubernetes.io/projected/50557193-910d-4913-b3b0-0328c53dd3d4-kube-api-access-5gvrh\") pod \"route-controller-manager-5f57c47998-9lz4t\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.181991 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-qxlpx"] Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.190797 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-qxlpx"] Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.204500 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.205715 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.216661 4919 scope.go:117] "RemoveContainer" containerID="673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6" Sep 30 20:26:05 crc kubenswrapper[4919]: E0930 20:26:05.217093 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6\": container with ID starting with 673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6 not found: ID does not exist" containerID="673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.217135 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6"} err="failed to get container status \"673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6\": rpc error: code = NotFound desc = could not find container \"673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6\": container with ID starting with 673c638607b55ced1265a054facd3d24df62a398dd92a571a53559ca7021bfe6 not found: ID does not exist" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.343565 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-client-ca\") pod \"18674508-e102-4f46-988d-743c561690e6\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.343659 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-client-ca\") pod \"50557193-910d-4913-b3b0-0328c53dd3d4\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.343710 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50557193-910d-4913-b3b0-0328c53dd3d4-serving-cert\") pod \"50557193-910d-4913-b3b0-0328c53dd3d4\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.343817 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-config\") pod \"18674508-e102-4f46-988d-743c561690e6\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.343934 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18674508-e102-4f46-988d-743c561690e6-serving-cert\") pod \"18674508-e102-4f46-988d-743c561690e6\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.343991 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7gh8\" (UniqueName: \"kubernetes.io/projected/18674508-e102-4f46-988d-743c561690e6-kube-api-access-b7gh8\") pod \"18674508-e102-4f46-988d-743c561690e6\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.344053 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-config\") pod \"50557193-910d-4913-b3b0-0328c53dd3d4\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.344173 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-proxy-ca-bundles\") pod \"18674508-e102-4f46-988d-743c561690e6\" (UID: \"18674508-e102-4f46-988d-743c561690e6\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.344251 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gvrh\" (UniqueName: \"kubernetes.io/projected/50557193-910d-4913-b3b0-0328c53dd3d4-kube-api-access-5gvrh\") pod \"50557193-910d-4913-b3b0-0328c53dd3d4\" (UID: \"50557193-910d-4913-b3b0-0328c53dd3d4\") " Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.344359 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-client-ca" (OuterVolumeSpecName: "client-ca") pod "50557193-910d-4913-b3b0-0328c53dd3d4" (UID: "50557193-910d-4913-b3b0-0328c53dd3d4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.344549 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-client-ca" (OuterVolumeSpecName: "client-ca") pod "18674508-e102-4f46-988d-743c561690e6" (UID: "18674508-e102-4f46-988d-743c561690e6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.344699 4919 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.344745 4919 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-client-ca\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.345365 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "18674508-e102-4f46-988d-743c561690e6" (UID: "18674508-e102-4f46-988d-743c561690e6"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.345657 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-config" (OuterVolumeSpecName: "config") pod "50557193-910d-4913-b3b0-0328c53dd3d4" (UID: "50557193-910d-4913-b3b0-0328c53dd3d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.345975 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-config" (OuterVolumeSpecName: "config") pod "18674508-e102-4f46-988d-743c561690e6" (UID: "18674508-e102-4f46-988d-743c561690e6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.347670 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50557193-910d-4913-b3b0-0328c53dd3d4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "50557193-910d-4913-b3b0-0328c53dd3d4" (UID: "50557193-910d-4913-b3b0-0328c53dd3d4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.348505 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18674508-e102-4f46-988d-743c561690e6-kube-api-access-b7gh8" (OuterVolumeSpecName: "kube-api-access-b7gh8") pod "18674508-e102-4f46-988d-743c561690e6" (UID: "18674508-e102-4f46-988d-743c561690e6"). InnerVolumeSpecName "kube-api-access-b7gh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.348764 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18674508-e102-4f46-988d-743c561690e6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "18674508-e102-4f46-988d-743c561690e6" (UID: "18674508-e102-4f46-988d-743c561690e6"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.349756 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50557193-910d-4913-b3b0-0328c53dd3d4-kube-api-access-5gvrh" (OuterVolumeSpecName: "kube-api-access-5gvrh") pod "50557193-910d-4913-b3b0-0328c53dd3d4" (UID: "50557193-910d-4913-b3b0-0328c53dd3d4"). InnerVolumeSpecName "kube-api-access-5gvrh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.446029 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50557193-910d-4913-b3b0-0328c53dd3d4-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.446063 4919 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.446076 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gvrh\" (UniqueName: \"kubernetes.io/projected/50557193-910d-4913-b3b0-0328c53dd3d4-kube-api-access-5gvrh\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.446089 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/50557193-910d-4913-b3b0-0328c53dd3d4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.446101 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18674508-e102-4f46-988d-743c561690e6-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.446111 4919 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18674508-e102-4f46-988d-743c561690e6-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.446123 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7gh8\" (UniqueName: \"kubernetes.io/projected/18674508-e102-4f46-988d-743c561690e6-kube-api-access-b7gh8\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.639985 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d053914-edeb-49d0-bffa-b6d63885a5fb" path="/var/lib/kubelet/pods/2d053914-edeb-49d0-bffa-b6d63885a5fb/volumes" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.640540 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4599a59e-3533-494d-b149-f84b3033c62c" path="/var/lib/kubelet/pods/4599a59e-3533-494d-b149-f84b3033c62c/volumes" Sep 30 20:26:05 crc kubenswrapper[4919]: I0930 20:26:05.641034 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc9cfa84-ad26-4f6a-aaea-75ee71060ff3" path="/var/lib/kubelet/pods/dc9cfa84-ad26-4f6a-aaea-75ee71060ff3/volumes" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.150682 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-865664d8fd-zftxr" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.150933 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.187129 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-865664d8fd-zftxr"] Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.198911 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-865664d8fd-zftxr"] Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.212366 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5c69d55649-wxrnv"] Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.213159 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.215168 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.215515 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.215699 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.217769 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.218813 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.219292 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.224940 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.232853 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t"] Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.265321 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5f57c47998-9lz4t"] Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.272150 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c69d55649-wxrnv"] Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.360340 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-client-ca\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.360376 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55619381-d6b2-4067-a9eb-af1875a6704e-serving-cert\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: 
\"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.360422 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-proxy-ca-bundles\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.360498 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-config\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.360619 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4vf6\" (UniqueName: \"kubernetes.io/projected/55619381-d6b2-4067-a9eb-af1875a6704e-kube-api-access-r4vf6\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.461775 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-proxy-ca-bundles\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.461844 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-config\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.461886 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4vf6\" (UniqueName: \"kubernetes.io/projected/55619381-d6b2-4067-a9eb-af1875a6704e-kube-api-access-r4vf6\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.461949 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-client-ca\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.461974 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55619381-d6b2-4067-a9eb-af1875a6704e-serving-cert\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc 
kubenswrapper[4919]: I0930 20:26:06.463241 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-proxy-ca-bundles\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.463712 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-client-ca\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.464738 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55619381-d6b2-4067-a9eb-af1875a6704e-config\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.474328 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/55619381-d6b2-4067-a9eb-af1875a6704e-serving-cert\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.491511 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4vf6\" (UniqueName: \"kubernetes.io/projected/55619381-d6b2-4067-a9eb-af1875a6704e-kube-api-access-r4vf6\") pod \"controller-manager-5c69d55649-wxrnv\" (UID: \"55619381-d6b2-4067-a9eb-af1875a6704e\") " pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.533298 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:06 crc kubenswrapper[4919]: I0930 20:26:06.750539 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c69d55649-wxrnv"] Sep 30 20:26:06 crc kubenswrapper[4919]: W0930 20:26:06.760442 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55619381_d6b2_4067_a9eb_af1875a6704e.slice/crio-0dcc5aa57367b680a5897f03deb875900701637f13ce067e97956697db47a2e8 WatchSource:0}: Error finding container 0dcc5aa57367b680a5897f03deb875900701637f13ce067e97956697db47a2e8: Status 404 returned error can't find the container with id 0dcc5aa57367b680a5897f03deb875900701637f13ce067e97956697db47a2e8 Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.157698 4919 generic.go:334] "Generic (PLEG): container finished" podID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerID="85e1cf4eba2d5775549cc3af1844d156f44daa451106ef804c2d65cbf7ad07f6" exitCode=0 Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.157863 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" event={"ID":"ef6fff6c-fe79-4db3-a127-8d9938489f52","Type":"ContainerDied","Data":"85e1cf4eba2d5775549cc3af1844d156f44daa451106ef804c2d65cbf7ad07f6"} Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.159591 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" event={"ID":"55619381-d6b2-4067-a9eb-af1875a6704e","Type":"ContainerStarted","Data":"fcc81be62324d87f08fad7fe035625bfde9d3c76723a6faf17e7e90e41a8a46d"} Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.159642 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" event={"ID":"55619381-d6b2-4067-a9eb-af1875a6704e","Type":"ContainerStarted","Data":"0dcc5aa57367b680a5897f03deb875900701637f13ce067e97956697db47a2e8"} Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.159896 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.166868 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.200160 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5c69d55649-wxrnv" podStartSLOduration=2.200146098 podStartE2EDuration="2.200146098s" podCreationTimestamp="2025-09-30 20:26:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:26:07.199085177 +0000 UTC m=+752.315118304" watchObservedRunningTime="2025-09-30 20:26:07.200146098 +0000 UTC m=+752.316179225" Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.643634 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18674508-e102-4f46-988d-743c561690e6" path="/var/lib/kubelet/pods/18674508-e102-4f46-988d-743c561690e6/volumes" Sep 30 20:26:07 crc kubenswrapper[4919]: I0930 20:26:07.644157 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50557193-910d-4913-b3b0-0328c53dd3d4" 
path="/var/lib/kubelet/pods/50557193-910d-4913-b3b0-0328c53dd3d4/volumes" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.176500 4919 generic.go:334] "Generic (PLEG): container finished" podID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerID="a8ec87b3b3b164fee6d84b2462d4d6e8db0cfe4c947843405fd7f63943d00791" exitCode=0 Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.176592 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" event={"ID":"ef6fff6c-fe79-4db3-a127-8d9938489f52","Type":"ContainerDied","Data":"a8ec87b3b3b164fee6d84b2462d4d6e8db0cfe4c947843405fd7f63943d00791"} Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.859080 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw"] Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.861162 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.865138 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.865487 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.865570 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.865644 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.865692 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.869302 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw"] Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.869448 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.993993 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj22c\" (UniqueName: \"kubernetes.io/projected/03833a7a-ec42-4ead-9f81-4a76fefd8a06-kube-api-access-bj22c\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.994081 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03833a7a-ec42-4ead-9f81-4a76fefd8a06-serving-cert\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.994116 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/03833a7a-ec42-4ead-9f81-4a76fefd8a06-client-ca\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:08 crc kubenswrapper[4919]: I0930 20:26:08.994148 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03833a7a-ec42-4ead-9f81-4a76fefd8a06-config\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.095035 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj22c\" (UniqueName: \"kubernetes.io/projected/03833a7a-ec42-4ead-9f81-4a76fefd8a06-kube-api-access-bj22c\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.095076 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03833a7a-ec42-4ead-9f81-4a76fefd8a06-serving-cert\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.095100 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03833a7a-ec42-4ead-9f81-4a76fefd8a06-client-ca\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.095119 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03833a7a-ec42-4ead-9f81-4a76fefd8a06-config\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.096230 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03833a7a-ec42-4ead-9f81-4a76fefd8a06-config\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.096351 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03833a7a-ec42-4ead-9f81-4a76fefd8a06-client-ca\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.109930 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03833a7a-ec42-4ead-9f81-4a76fefd8a06-serving-cert\") pod 
\"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.118608 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj22c\" (UniqueName: \"kubernetes.io/projected/03833a7a-ec42-4ead-9f81-4a76fefd8a06-kube-api-access-bj22c\") pod \"route-controller-manager-64bc885476-vktvw\" (UID: \"03833a7a-ec42-4ead-9f81-4a76fefd8a06\") " pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.178350 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.511050 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.558488 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw"] Sep 30 20:26:09 crc kubenswrapper[4919]: W0930 20:26:09.570442 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03833a7a_ec42_4ead_9f81_4a76fefd8a06.slice/crio-0de0aabfc6805256ef6ee475758264f9143beccdc23aa6623e7590d4fa48dd29 WatchSource:0}: Error finding container 0de0aabfc6805256ef6ee475758264f9143beccdc23aa6623e7590d4fa48dd29: Status 404 returned error can't find the container with id 0de0aabfc6805256ef6ee475758264f9143beccdc23aa6623e7590d4fa48dd29 Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.603963 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-util\") pod \"ef6fff6c-fe79-4db3-a127-8d9938489f52\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.604762 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-bundle\") pod \"ef6fff6c-fe79-4db3-a127-8d9938489f52\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.604890 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmqhb\" (UniqueName: \"kubernetes.io/projected/ef6fff6c-fe79-4db3-a127-8d9938489f52-kube-api-access-fmqhb\") pod \"ef6fff6c-fe79-4db3-a127-8d9938489f52\" (UID: \"ef6fff6c-fe79-4db3-a127-8d9938489f52\") " Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.605843 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-bundle" (OuterVolumeSpecName: "bundle") pod "ef6fff6c-fe79-4db3-a127-8d9938489f52" (UID: "ef6fff6c-fe79-4db3-a127-8d9938489f52"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.611783 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef6fff6c-fe79-4db3-a127-8d9938489f52-kube-api-access-fmqhb" (OuterVolumeSpecName: "kube-api-access-fmqhb") pod "ef6fff6c-fe79-4db3-a127-8d9938489f52" (UID: "ef6fff6c-fe79-4db3-a127-8d9938489f52"). InnerVolumeSpecName "kube-api-access-fmqhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.701931 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5tkrx"] Sep 30 20:26:09 crc kubenswrapper[4919]: E0930 20:26:09.702312 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerName="extract" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.702335 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerName="extract" Sep 30 20:26:09 crc kubenswrapper[4919]: E0930 20:26:09.702349 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerName="pull" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.702357 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerName="pull" Sep 30 20:26:09 crc kubenswrapper[4919]: E0930 20:26:09.702380 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerName="util" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.702387 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerName="util" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.713074 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmqhb\" (UniqueName: \"kubernetes.io/projected/ef6fff6c-fe79-4db3-a127-8d9938489f52-kube-api-access-fmqhb\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.713340 4919 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.720490 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef6fff6c-fe79-4db3-a127-8d9938489f52" containerName="extract" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.726288 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.730406 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5tkrx"] Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.915571 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-catalog-content\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.915642 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x5r2\" (UniqueName: \"kubernetes.io/projected/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-kube-api-access-8x5r2\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:09 crc kubenswrapper[4919]: I0930 20:26:09.915679 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-utilities\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.017683 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-catalog-content\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.018074 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x5r2\" (UniqueName: \"kubernetes.io/projected/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-kube-api-access-8x5r2\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.018231 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-utilities\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.018643 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-catalog-content\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.018881 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-utilities\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.036181 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8x5r2\" (UniqueName: \"kubernetes.io/projected/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-kube-api-access-8x5r2\") pod \"redhat-operators-5tkrx\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.049468 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.064178 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-util" (OuterVolumeSpecName: "util") pod "ef6fff6c-fe79-4db3-a127-8d9938489f52" (UID: "ef6fff6c-fe79-4db3-a127-8d9938489f52"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.119928 4919 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ef6fff6c-fe79-4db3-a127-8d9938489f52-util\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.168392 4919 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.207909 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" event={"ID":"03833a7a-ec42-4ead-9f81-4a76fefd8a06","Type":"ContainerStarted","Data":"6b23f2460575553190d46630beebd352e645e7136543e23df7d0bd8d886d887e"} Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.207959 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" event={"ID":"03833a7a-ec42-4ead-9f81-4a76fefd8a06","Type":"ContainerStarted","Data":"0de0aabfc6805256ef6ee475758264f9143beccdc23aa6623e7590d4fa48dd29"} Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.208510 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.211474 4919 patch_prober.go:28] interesting pod/route-controller-manager-64bc885476-vktvw container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" start-of-body= Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.211589 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" podUID="03833a7a-ec42-4ead-9f81-4a76fefd8a06" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.67:8443/healthz\": dial tcp 10.217.0.67:8443: connect: connection refused" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.216004 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" event={"ID":"ef6fff6c-fe79-4db3-a127-8d9938489f52","Type":"ContainerDied","Data":"69f77004afd6ea9fe5caff8ce96b03bdd12781d3514c0e5f317b8d410a4cce47"} Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.216048 4919 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="69f77004afd6ea9fe5caff8ce96b03bdd12781d3514c0e5f317b8d410a4cce47" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.216140 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr" Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.486321 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" podStartSLOduration=5.486287021 podStartE2EDuration="5.486287021s" podCreationTimestamp="2025-09-30 20:26:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:26:10.246091121 +0000 UTC m=+755.362124268" watchObservedRunningTime="2025-09-30 20:26:10.486287021 +0000 UTC m=+755.602320148" Sep 30 20:26:10 crc kubenswrapper[4919]: W0930 20:26:10.497382 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23b7e26b_bfb1_4799_9ee3_7cf6c9a7e8e9.slice/crio-27a86a80fbcf4100a80e61478158e3ad1936808a39de61655ecefe79158f0573 WatchSource:0}: Error finding container 27a86a80fbcf4100a80e61478158e3ad1936808a39de61655ecefe79158f0573: Status 404 returned error can't find the container with id 27a86a80fbcf4100a80e61478158e3ad1936808a39de61655ecefe79158f0573 Sep 30 20:26:10 crc kubenswrapper[4919]: I0930 20:26:10.497949 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5tkrx"] Sep 30 20:26:11 crc kubenswrapper[4919]: I0930 20:26:11.224585 4919 generic.go:334] "Generic (PLEG): container finished" podID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerID="2f6c2d7f746ae3b33bf020e992dd13ba6a3e2baaf171eb8172948fcbae313d78" exitCode=0 Sep 30 20:26:11 crc kubenswrapper[4919]: I0930 20:26:11.226926 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tkrx" event={"ID":"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9","Type":"ContainerDied","Data":"2f6c2d7f746ae3b33bf020e992dd13ba6a3e2baaf171eb8172948fcbae313d78"} Sep 30 20:26:11 crc kubenswrapper[4919]: I0930 20:26:11.226981 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tkrx" event={"ID":"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9","Type":"ContainerStarted","Data":"27a86a80fbcf4100a80e61478158e3ad1936808a39de61655ecefe79158f0573"} Sep 30 20:26:11 crc kubenswrapper[4919]: I0930 20:26:11.231733 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-64bc885476-vktvw" Sep 30 20:26:13 crc kubenswrapper[4919]: I0930 20:26:13.240572 4919 generic.go:334] "Generic (PLEG): container finished" podID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerID="32ea93c13f3da4b7d2074e833aa419ab8cf1dc8812b03f3fa3c7466b3f2a1517" exitCode=0 Sep 30 20:26:13 crc kubenswrapper[4919]: I0930 20:26:13.240681 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tkrx" event={"ID":"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9","Type":"ContainerDied","Data":"32ea93c13f3da4b7d2074e833aa419ab8cf1dc8812b03f3fa3c7466b3f2a1517"} Sep 30 20:26:14 crc kubenswrapper[4919]: I0930 20:26:14.248082 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tkrx" 
event={"ID":"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9","Type":"ContainerStarted","Data":"ff3d1d5dfeda0e124fb322b28b681dfa164e55cae1acfe5c15aa0c43cd12b0a1"} Sep 30 20:26:14 crc kubenswrapper[4919]: I0930 20:26:14.268464 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5tkrx" podStartSLOduration=2.70262662 podStartE2EDuration="5.268445079s" podCreationTimestamp="2025-09-30 20:26:09 +0000 UTC" firstStartedPulling="2025-09-30 20:26:11.227955935 +0000 UTC m=+756.343989062" lastFinishedPulling="2025-09-30 20:26:13.793774394 +0000 UTC m=+758.909807521" observedRunningTime="2025-09-30 20:26:14.261905009 +0000 UTC m=+759.377938136" watchObservedRunningTime="2025-09-30 20:26:14.268445079 +0000 UTC m=+759.384478226" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.773977 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh"] Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.775493 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.778698 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.778962 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.779229 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.779575 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.780437 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-f2q2w" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.792075 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh"] Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.952489 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b90ce133-8951-4dee-92bd-f672580fb818-apiservice-cert\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" (UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.952897 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b90ce133-8951-4dee-92bd-f672580fb818-webhook-cert\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" (UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:19 crc kubenswrapper[4919]: I0930 20:26:19.952964 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l554j\" (UniqueName: \"kubernetes.io/projected/b90ce133-8951-4dee-92bd-f672580fb818-kube-api-access-l554j\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" 
(UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.041526 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx"] Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.042422 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.044942 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.045295 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.045789 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-dc9xs" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.050075 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.050338 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.053990 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b90ce133-8951-4dee-92bd-f672580fb818-webhook-cert\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" (UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.054035 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l554j\" (UniqueName: \"kubernetes.io/projected/b90ce133-8951-4dee-92bd-f672580fb818-kube-api-access-l554j\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" (UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.054415 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b90ce133-8951-4dee-92bd-f672580fb818-apiservice-cert\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" (UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.059940 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx"] Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.063259 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b90ce133-8951-4dee-92bd-f672580fb818-webhook-cert\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" (UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.069650 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/b90ce133-8951-4dee-92bd-f672580fb818-apiservice-cert\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" (UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.088440 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l554j\" (UniqueName: \"kubernetes.io/projected/b90ce133-8951-4dee-92bd-f672580fb818-kube-api-access-l554j\") pod \"metallb-operator-controller-manager-774854f49b-zvlnh\" (UID: \"b90ce133-8951-4dee-92bd-f672580fb818\") " pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.129112 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.155727 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb4n9\" (UniqueName: \"kubernetes.io/projected/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-kube-api-access-lb4n9\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.155816 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-webhook-cert\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.155840 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-apiservice-cert\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.256856 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-webhook-cert\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.257180 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-apiservice-cert\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.257350 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb4n9\" (UniqueName: \"kubernetes.io/projected/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-kube-api-access-lb4n9\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " 
pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.260768 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-webhook-cert\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.261672 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-apiservice-cert\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.274427 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb4n9\" (UniqueName: \"kubernetes.io/projected/24a33fda-3d02-475b-96c4-4eef5f0a1dcf-kube-api-access-lb4n9\") pod \"metallb-operator-webhook-server-77fbfdddcb-cphcx\" (UID: \"24a33fda-3d02-475b-96c4-4eef5f0a1dcf\") " pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.324762 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.357006 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.389689 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.818290 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh"] Sep 30 20:26:20 crc kubenswrapper[4919]: W0930 20:26:20.822083 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb90ce133_8951_4dee_92bd_f672580fb818.slice/crio-5b2eed5772b98b5fd914c71460db9c6d024295919bcaa64248782f4a68a7f722 WatchSource:0}: Error finding container 5b2eed5772b98b5fd914c71460db9c6d024295919bcaa64248782f4a68a7f722: Status 404 returned error can't find the container with id 5b2eed5772b98b5fd914c71460db9c6d024295919bcaa64248782f4a68a7f722 Sep 30 20:26:20 crc kubenswrapper[4919]: I0930 20:26:20.874688 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx"] Sep 30 20:26:20 crc kubenswrapper[4919]: W0930 20:26:20.878461 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24a33fda_3d02_475b_96c4_4eef5f0a1dcf.slice/crio-b71dc061338ba057d0f2efa7415cd1c09ae7c7fe4e4283fff63aea6e641e9197 WatchSource:0}: Error finding container b71dc061338ba057d0f2efa7415cd1c09ae7c7fe4e4283fff63aea6e641e9197: Status 404 returned error can't find the container with id b71dc061338ba057d0f2efa7415cd1c09ae7c7fe4e4283fff63aea6e641e9197 Sep 30 20:26:21 crc kubenswrapper[4919]: I0930 20:26:21.309710 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" event={"ID":"b90ce133-8951-4dee-92bd-f672580fb818","Type":"ContainerStarted","Data":"5b2eed5772b98b5fd914c71460db9c6d024295919bcaa64248782f4a68a7f722"} Sep 30 20:26:21 crc kubenswrapper[4919]: I0930 20:26:21.311777 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" event={"ID":"24a33fda-3d02-475b-96c4-4eef5f0a1dcf","Type":"ContainerStarted","Data":"b71dc061338ba057d0f2efa7415cd1c09ae7c7fe4e4283fff63aea6e641e9197"} Sep 30 20:26:21 crc kubenswrapper[4919]: I0930 20:26:21.695585 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5tkrx"] Sep 30 20:26:23 crc kubenswrapper[4919]: I0930 20:26:23.324376 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5tkrx" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerName="registry-server" containerID="cri-o://ff3d1d5dfeda0e124fb322b28b681dfa164e55cae1acfe5c15aa0c43cd12b0a1" gracePeriod=2 Sep 30 20:26:24 crc kubenswrapper[4919]: I0930 20:26:24.351335 4919 generic.go:334] "Generic (PLEG): container finished" podID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerID="ff3d1d5dfeda0e124fb322b28b681dfa164e55cae1acfe5c15aa0c43cd12b0a1" exitCode=0 Sep 30 20:26:24 crc kubenswrapper[4919]: I0930 20:26:24.352106 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tkrx" event={"ID":"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9","Type":"ContainerDied","Data":"ff3d1d5dfeda0e124fb322b28b681dfa164e55cae1acfe5c15aa0c43cd12b0a1"} Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.062348 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.062683 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.062732 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.063229 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"233411e098bbdd508df400a23be94bf9227b0271eb6d0d9c0dd1c95d19986660"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.063283 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://233411e098bbdd508df400a23be94bf9227b0271eb6d0d9c0dd1c95d19986660" gracePeriod=600 Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.484194 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.668562 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x5r2\" (UniqueName: \"kubernetes.io/projected/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-kube-api-access-8x5r2\") pod \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.668995 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-catalog-content\") pod \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.669098 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-utilities\") pod \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\" (UID: \"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9\") " Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.669952 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-utilities" (OuterVolumeSpecName: "utilities") pod "23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" (UID: "23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.673876 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-kube-api-access-8x5r2" (OuterVolumeSpecName: "kube-api-access-8x5r2") pod "23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" (UID: "23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9"). InnerVolumeSpecName "kube-api-access-8x5r2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.753380 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" (UID: "23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.770674 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x5r2\" (UniqueName: \"kubernetes.io/projected/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-kube-api-access-8x5r2\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.770708 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:26 crc kubenswrapper[4919]: I0930 20:26:26.770720 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.375727 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5tkrx" event={"ID":"23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9","Type":"ContainerDied","Data":"27a86a80fbcf4100a80e61478158e3ad1936808a39de61655ecefe79158f0573"} Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.377764 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" event={"ID":"b90ce133-8951-4dee-92bd-f672580fb818","Type":"ContainerStarted","Data":"079f69db07710003ee470e9525f3ace433869a89d1dbde4ba55f4a26b5942d6b"} Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.377989 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.377933 4919 scope.go:117] "RemoveContainer" containerID="ff3d1d5dfeda0e124fb322b28b681dfa164e55cae1acfe5c15aa0c43cd12b0a1" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.376032 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5tkrx" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.381691 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="233411e098bbdd508df400a23be94bf9227b0271eb6d0d9c0dd1c95d19986660" exitCode=0 Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.381885 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"233411e098bbdd508df400a23be94bf9227b0271eb6d0d9c0dd1c95d19986660"} Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.381976 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"7330287e87c2c36810a07467a4c3caedfb96311988e76c64c3eedda691a5f9f5"} Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.383876 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" event={"ID":"24a33fda-3d02-475b-96c4-4eef5f0a1dcf","Type":"ContainerStarted","Data":"c74d253f78c5b83bffee5d07e4ed23e3dbcf867582c1be40ec2304f50c4205b4"} Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.384590 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.394531 4919 scope.go:117] "RemoveContainer" containerID="32ea93c13f3da4b7d2074e833aa419ab8cf1dc8812b03f3fa3c7466b3f2a1517" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.444800 4919 scope.go:117] "RemoveContainer" containerID="2f6c2d7f746ae3b33bf020e992dd13ba6a3e2baaf171eb8172948fcbae313d78" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.458408 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" podStartSLOduration=2.822927709 podStartE2EDuration="8.45838438s" podCreationTimestamp="2025-09-30 20:26:19 +0000 UTC" firstStartedPulling="2025-09-30 20:26:20.824275969 +0000 UTC m=+765.940309096" lastFinishedPulling="2025-09-30 20:26:26.45973265 +0000 UTC m=+771.575765767" observedRunningTime="2025-09-30 20:26:27.42841958 +0000 UTC m=+772.544452747" watchObservedRunningTime="2025-09-30 20:26:27.45838438 +0000 UTC m=+772.574417507" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.459158 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx" podStartSLOduration=1.843620859 podStartE2EDuration="7.459146002s" podCreationTimestamp="2025-09-30 20:26:20 +0000 UTC" firstStartedPulling="2025-09-30 20:26:20.881452068 +0000 UTC m=+765.997485195" lastFinishedPulling="2025-09-30 20:26:26.496977201 +0000 UTC m=+771.613010338" observedRunningTime="2025-09-30 20:26:27.453759586 +0000 UTC m=+772.569792743" watchObservedRunningTime="2025-09-30 20:26:27.459146002 +0000 UTC m=+772.575179129" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.490334 4919 scope.go:117] "RemoveContainer" containerID="a845129a4d8c98980a8f13a053b2a40edd21a9b836def7381c0f8f8c97e05996" Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.502603 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5tkrx"] Sep 30 
Sep 30 20:26:27 crc kubenswrapper[4919]: I0930 20:26:27.660797 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" path="/var/lib/kubelet/pods/23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9/volumes"
Sep 30 20:26:40 crc kubenswrapper[4919]: I0930 20:26:40.364479 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-77fbfdddcb-cphcx"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.307842 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vnp9m"]
Sep 30 20:26:41 crc kubenswrapper[4919]: E0930 20:26:41.308519 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerName="registry-server"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.308542 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerName="registry-server"
Sep 30 20:26:41 crc kubenswrapper[4919]: E0930 20:26:41.308555 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerName="extract-content"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.308563 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerName="extract-content"
Sep 30 20:26:41 crc kubenswrapper[4919]: E0930 20:26:41.308577 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerName="extract-utilities"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.308587 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerName="extract-utilities"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.308735 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="23b7e26b-bfb1-4799-9ee3-7cf6c9a7e8e9" containerName="registry-server"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.309782 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.330194 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnp9m"]
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.473268 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rk76\" (UniqueName: \"kubernetes.io/projected/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-kube-api-access-7rk76\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.473343 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-utilities\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.473374 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-catalog-content\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.574754 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-utilities\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.574820 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-catalog-content\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.574891 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rk76\" (UniqueName: \"kubernetes.io/projected/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-kube-api-access-7rk76\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.575537 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-utilities\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.575621 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-catalog-content\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.596543 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rk76\" (UniqueName: \"kubernetes.io/projected/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-kube-api-access-7rk76\") pod \"redhat-marketplace-vnp9m\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") " pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:41 crc kubenswrapper[4919]: I0930 20:26:41.631383 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:42 crc kubenswrapper[4919]: I0930 20:26:42.058487 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnp9m"]
Sep 30 20:26:42 crc kubenswrapper[4919]: I0930 20:26:42.488965 4919 generic.go:334] "Generic (PLEG): container finished" podID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerID="f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963" exitCode=0
Sep 30 20:26:42 crc kubenswrapper[4919]: I0930 20:26:42.489017 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnp9m" event={"ID":"466fed70-c49a-42d7-a0d3-f575bdfbf8a6","Type":"ContainerDied","Data":"f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963"}
Sep 30 20:26:42 crc kubenswrapper[4919]: I0930 20:26:42.489062 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnp9m" event={"ID":"466fed70-c49a-42d7-a0d3-f575bdfbf8a6","Type":"ContainerStarted","Data":"7f1ca44043ec7d1be4430e5f96ad55c25fcafc4aa0773258fdf6572a7f3865f7"}
Sep 30 20:26:44 crc kubenswrapper[4919]: I0930 20:26:44.516979 4919 generic.go:334] "Generic (PLEG): container finished" podID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerID="35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3" exitCode=0
Sep 30 20:26:44 crc kubenswrapper[4919]: I0930 20:26:44.517068 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnp9m" event={"ID":"466fed70-c49a-42d7-a0d3-f575bdfbf8a6","Type":"ContainerDied","Data":"35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3"}
Sep 30 20:26:45 crc kubenswrapper[4919]: I0930 20:26:45.526086 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnp9m" event={"ID":"466fed70-c49a-42d7-a0d3-f575bdfbf8a6","Type":"ContainerStarted","Data":"549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c"}
Sep 30 20:26:45 crc kubenswrapper[4919]: I0930 20:26:45.556870 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vnp9m" podStartSLOduration=2.099057558 podStartE2EDuration="4.556848089s" podCreationTimestamp="2025-09-30 20:26:41 +0000 UTC" firstStartedPulling="2025-09-30 20:26:42.491044146 +0000 UTC m=+787.607077273" lastFinishedPulling="2025-09-30 20:26:44.948834667 +0000 UTC m=+790.064867804" observedRunningTime="2025-09-30 20:26:45.556459748 +0000 UTC m=+790.672492885" watchObservedRunningTime="2025-09-30 20:26:45.556848089 +0000 UTC m=+790.672881236"
Sep 30 20:26:51 crc kubenswrapper[4919]: I0930 20:26:51.641192 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:51 crc kubenswrapper[4919]: I0930 20:26:51.641731 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:51 crc kubenswrapper[4919]: I0930 20:26:51.670669 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:52 crc kubenswrapper[4919]: I0930 20:26:52.633273 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:53 crc kubenswrapper[4919]: I0930 20:26:53.100097 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnp9m"]
Sep 30 20:26:54 crc kubenswrapper[4919]: I0930 20:26:54.587887 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vnp9m" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerName="registry-server" containerID="cri-o://549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c" gracePeriod=2
Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.037546 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnp9m"
Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.098663 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-catalog-content\") pod \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") "
Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.098793 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rk76\" (UniqueName: \"kubernetes.io/projected/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-kube-api-access-7rk76\") pod \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") "
Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.098839 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-utilities\") pod \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\" (UID: \"466fed70-c49a-42d7-a0d3-f575bdfbf8a6\") "
Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.099852 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-utilities" (OuterVolumeSpecName: "utilities") pod "466fed70-c49a-42d7-a0d3-f575bdfbf8a6" (UID: "466fed70-c49a-42d7-a0d3-f575bdfbf8a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.114847 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-kube-api-access-7rk76" (OuterVolumeSpecName: "kube-api-access-7rk76") pod "466fed70-c49a-42d7-a0d3-f575bdfbf8a6" (UID: "466fed70-c49a-42d7-a0d3-f575bdfbf8a6"). InnerVolumeSpecName "kube-api-access-7rk76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.116003 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "466fed70-c49a-42d7-a0d3-f575bdfbf8a6" (UID: "466fed70-c49a-42d7-a0d3-f575bdfbf8a6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.200254 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rk76\" (UniqueName: \"kubernetes.io/projected/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-kube-api-access-7rk76\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.200320 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.200339 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/466fed70-c49a-42d7-a0d3-f575bdfbf8a6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.601316 4919 generic.go:334] "Generic (PLEG): container finished" podID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerID="549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c" exitCode=0 Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.601430 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnp9m" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.601431 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnp9m" event={"ID":"466fed70-c49a-42d7-a0d3-f575bdfbf8a6","Type":"ContainerDied","Data":"549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c"} Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.601580 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnp9m" event={"ID":"466fed70-c49a-42d7-a0d3-f575bdfbf8a6","Type":"ContainerDied","Data":"7f1ca44043ec7d1be4430e5f96ad55c25fcafc4aa0773258fdf6572a7f3865f7"} Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.601605 4919 scope.go:117] "RemoveContainer" containerID="549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.638621 4919 scope.go:117] "RemoveContainer" containerID="35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.674753 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnp9m"] Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.678277 4919 scope.go:117] "RemoveContainer" containerID="f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.682873 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnp9m"] Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.710951 4919 scope.go:117] "RemoveContainer" containerID="549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c" Sep 30 20:26:55 crc kubenswrapper[4919]: E0930 20:26:55.711779 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c\": container with ID starting with 549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c not found: ID does not exist" containerID="549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.711831 4919 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c"} err="failed to get container status \"549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c\": rpc error: code = NotFound desc = could not find container \"549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c\": container with ID starting with 549ecb8fea500929de8399473e71b20befd6d3abe2e89203e8db3f06deec2b5c not found: ID does not exist" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.711907 4919 scope.go:117] "RemoveContainer" containerID="35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3" Sep 30 20:26:55 crc kubenswrapper[4919]: E0930 20:26:55.712376 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3\": container with ID starting with 35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3 not found: ID does not exist" containerID="35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.712414 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3"} err="failed to get container status \"35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3\": rpc error: code = NotFound desc = could not find container \"35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3\": container with ID starting with 35b17ad801c5d65daeecae2b73033515250e83e23aa1e1fb23fc3eba52bc85a3 not found: ID does not exist" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.712432 4919 scope.go:117] "RemoveContainer" containerID="f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963" Sep 30 20:26:55 crc kubenswrapper[4919]: E0930 20:26:55.712793 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963\": container with ID starting with f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963 not found: ID does not exist" containerID="f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963" Sep 30 20:26:55 crc kubenswrapper[4919]: I0930 20:26:55.712824 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963"} err="failed to get container status \"f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963\": rpc error: code = NotFound desc = could not find container \"f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963\": container with ID starting with f756f062cee5e8d911532cb0bec06cd2429d43a1655d59fdd7a0be6b996e8963 not found: ID does not exist" Sep 30 20:26:57 crc kubenswrapper[4919]: I0930 20:26:57.639126 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" path="/var/lib/kubelet/pods/466fed70-c49a-42d7-a0d3-f575bdfbf8a6/volumes" Sep 30 20:27:00 crc kubenswrapper[4919]: I0930 20:27:00.395911 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-774854f49b-zvlnh" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.262655 4919 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["metallb-system/frr-k8s-j8cms"] Sep 30 20:27:01 crc kubenswrapper[4919]: E0930 20:27:01.263040 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerName="extract-content" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.263058 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerName="extract-content" Sep 30 20:27:01 crc kubenswrapper[4919]: E0930 20:27:01.263095 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerName="extract-utilities" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.263106 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerName="extract-utilities" Sep 30 20:27:01 crc kubenswrapper[4919]: E0930 20:27:01.263123 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerName="registry-server" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.263133 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerName="registry-server" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.263316 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="466fed70-c49a-42d7-a0d3-f575bdfbf8a6" containerName="registry-server" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.266094 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.268588 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.268840 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-vtjdj" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.268923 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.270721 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl"] Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.271703 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.274506 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.304723 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl"] Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.341983 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-zqj5g"] Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.342811 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.346651 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.346875 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-sxqfc" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.347075 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.347350 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.354555 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-th6f6"] Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.355464 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: W0930 20:27:01.359816 4919 reflector.go:561] object-"metallb-system"/"controller-certs-secret": failed to list *v1.Secret: secrets "controller-certs-secret" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "metallb-system": no relationship found between node 'crc' and this object Sep 30 20:27:01 crc kubenswrapper[4919]: E0930 20:27:01.359855 4919 reflector.go:158] "Unhandled Error" err="object-\"metallb-system\"/\"controller-certs-secret\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"controller-certs-secret\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"metallb-system\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.373142 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-th6f6"] Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386634 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-sockets\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386697 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-startup\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386731 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7fdc3545-02de-4073-b40a-249a1a858d3c-metrics-certs\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386761 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn527\" (UniqueName: \"kubernetes.io/projected/07ab26d9-dc69-47bf-9d23-b0f94cf42749-kube-api-access-xn527\") pod 
\"frr-k8s-webhook-server-5478bdb765-dgcsl\" (UID: \"07ab26d9-dc69-47bf-9d23-b0f94cf42749\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386781 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rvhz\" (UniqueName: \"kubernetes.io/projected/7fdc3545-02de-4073-b40a-249a1a858d3c-kube-api-access-4rvhz\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386799 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-reloader\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386815 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/07ab26d9-dc69-47bf-9d23-b0f94cf42749-cert\") pod \"frr-k8s-webhook-server-5478bdb765-dgcsl\" (UID: \"07ab26d9-dc69-47bf-9d23-b0f94cf42749\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386831 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-metrics\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.386847 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-conf\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.487883 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n9q4\" (UniqueName: \"kubernetes.io/projected/5de2da45-8775-4784-9c80-810c6713751e-kube-api-access-8n9q4\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.487940 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn527\" (UniqueName: \"kubernetes.io/projected/07ab26d9-dc69-47bf-9d23-b0f94cf42749-kube-api-access-xn527\") pod \"frr-k8s-webhook-server-5478bdb765-dgcsl\" (UID: \"07ab26d9-dc69-47bf-9d23-b0f94cf42749\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.487962 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metrics-certs\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.488010 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rvhz\" (UniqueName: 
\"kubernetes.io/projected/7fdc3545-02de-4073-b40a-249a1a858d3c-kube-api-access-4rvhz\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.488036 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-reloader\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.488408 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/07ab26d9-dc69-47bf-9d23-b0f94cf42749-cert\") pod \"frr-k8s-webhook-server-5478bdb765-dgcsl\" (UID: \"07ab26d9-dc69-47bf-9d23-b0f94cf42749\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.488429 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-metrics\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.488548 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-reloader\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.488848 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-metrics\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489408 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-conf\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489527 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-metrics-certs\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489616 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-sockets\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489641 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-conf\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489731 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metallb-excludel2\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489782 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-startup\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489849 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489848 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-sockets\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489870 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jq4p\" (UniqueName: \"kubernetes.io/projected/2b1754b1-e5ff-4053-bd08-09773a42d4eb-kube-api-access-4jq4p\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489898 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-cert\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.489932 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7fdc3545-02de-4073-b40a-249a1a858d3c-metrics-certs\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.490599 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7fdc3545-02de-4073-b40a-249a1a858d3c-frr-startup\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.497856 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/07ab26d9-dc69-47bf-9d23-b0f94cf42749-cert\") pod \"frr-k8s-webhook-server-5478bdb765-dgcsl\" (UID: \"07ab26d9-dc69-47bf-9d23-b0f94cf42749\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.498693 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7fdc3545-02de-4073-b40a-249a1a858d3c-metrics-certs\") pod \"frr-k8s-j8cms\" (UID: 
\"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.512975 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn527\" (UniqueName: \"kubernetes.io/projected/07ab26d9-dc69-47bf-9d23-b0f94cf42749-kube-api-access-xn527\") pod \"frr-k8s-webhook-server-5478bdb765-dgcsl\" (UID: \"07ab26d9-dc69-47bf-9d23-b0f94cf42749\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.513078 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rvhz\" (UniqueName: \"kubernetes.io/projected/7fdc3545-02de-4073-b40a-249a1a858d3c-kube-api-access-4rvhz\") pod \"frr-k8s-j8cms\" (UID: \"7fdc3545-02de-4073-b40a-249a1a858d3c\") " pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.591240 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metallb-excludel2\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.591362 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.591401 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jq4p\" (UniqueName: \"kubernetes.io/projected/2b1754b1-e5ff-4053-bd08-09773a42d4eb-kube-api-access-4jq4p\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.591441 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-cert\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.591511 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n9q4\" (UniqueName: \"kubernetes.io/projected/5de2da45-8775-4784-9c80-810c6713751e-kube-api-access-8n9q4\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.591556 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metrics-certs\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: E0930 20:27:01.591580 4919 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.591621 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-metrics-certs\") pod 
\"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: E0930 20:27:01.591670 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist podName:2b1754b1-e5ff-4053-bd08-09773a42d4eb nodeName:}" failed. No retries permitted until 2025-09-30 20:27:02.091644455 +0000 UTC m=+807.207677602 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist") pod "speaker-zqj5g" (UID: "2b1754b1-e5ff-4053-bd08-09773a42d4eb") : secret "metallb-memberlist" not found Sep 30 20:27:01 crc kubenswrapper[4919]: E0930 20:27:01.592347 4919 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Sep 30 20:27:01 crc kubenswrapper[4919]: E0930 20:27:01.592430 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metrics-certs podName:2b1754b1-e5ff-4053-bd08-09773a42d4eb nodeName:}" failed. No retries permitted until 2025-09-30 20:27:02.092406347 +0000 UTC m=+807.208439544 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metrics-certs") pod "speaker-zqj5g" (UID: "2b1754b1-e5ff-4053-bd08-09773a42d4eb") : secret "speaker-certs-secret" not found Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.592348 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metallb-excludel2\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.594263 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.596271 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.610368 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-cert\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.616285 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jq4p\" (UniqueName: \"kubernetes.io/projected/2b1754b1-e5ff-4053-bd08-09773a42d4eb-kube-api-access-4jq4p\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.621411 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n9q4\" (UniqueName: \"kubernetes.io/projected/5de2da45-8775-4784-9c80-810c6713751e-kube-api-access-8n9q4\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:01 crc kubenswrapper[4919]: I0930 20:27:01.621648 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:02 crc kubenswrapper[4919]: I0930 20:27:02.042088 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl"] Sep 30 20:27:02 crc kubenswrapper[4919]: W0930 20:27:02.048194 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07ab26d9_dc69_47bf_9d23_b0f94cf42749.slice/crio-781c8be4dd1a1e33a982a5d7748a213e857b6a2e55dd0b1bb247ddf65d857e22 WatchSource:0}: Error finding container 781c8be4dd1a1e33a982a5d7748a213e857b6a2e55dd0b1bb247ddf65d857e22: Status 404 returned error can't find the container with id 781c8be4dd1a1e33a982a5d7748a213e857b6a2e55dd0b1bb247ddf65d857e22 Sep 30 20:27:02 crc kubenswrapper[4919]: I0930 20:27:02.099046 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:02 crc kubenswrapper[4919]: I0930 20:27:02.099151 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metrics-certs\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:02 crc kubenswrapper[4919]: E0930 20:27:02.099282 4919 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 20:27:02 crc kubenswrapper[4919]: E0930 20:27:02.099358 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist podName:2b1754b1-e5ff-4053-bd08-09773a42d4eb nodeName:}" failed. No retries permitted until 2025-09-30 20:27:03.099340597 +0000 UTC m=+808.215373724 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist") pod "speaker-zqj5g" (UID: "2b1754b1-e5ff-4053-bd08-09773a42d4eb") : secret "metallb-memberlist" not found Sep 30 20:27:02 crc kubenswrapper[4919]: I0930 20:27:02.108722 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-metrics-certs\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:02 crc kubenswrapper[4919]: E0930 20:27:02.592099 4919 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: failed to sync secret cache: timed out waiting for the condition Sep 30 20:27:02 crc kubenswrapper[4919]: E0930 20:27:02.592188 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-metrics-certs podName:5de2da45-8775-4784-9c80-810c6713751e nodeName:}" failed. No retries permitted until 2025-09-30 20:27:03.092166599 +0000 UTC m=+808.208199736 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-metrics-certs") pod "controller-5d688f5ffc-th6f6" (UID: "5de2da45-8775-4784-9c80-810c6713751e") : failed to sync secret cache: timed out waiting for the condition Sep 30 20:27:02 crc kubenswrapper[4919]: I0930 20:27:02.651225 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" event={"ID":"07ab26d9-dc69-47bf-9d23-b0f94cf42749","Type":"ContainerStarted","Data":"781c8be4dd1a1e33a982a5d7748a213e857b6a2e55dd0b1bb247ddf65d857e22"} Sep 30 20:27:02 crc kubenswrapper[4919]: I0930 20:27:02.653318 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerStarted","Data":"316ae87512336032cdc17989891ed9f8445c6c06b6e6a61b51e622e113f6f86f"} Sep 30 20:27:02 crc kubenswrapper[4919]: I0930 20:27:02.792784 4919 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 30 20:27:03 crc kubenswrapper[4919]: I0930 20:27:03.113001 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-metrics-certs\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:03 crc kubenswrapper[4919]: I0930 20:27:03.113091 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:03 crc kubenswrapper[4919]: E0930 20:27:03.113237 4919 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 30 20:27:03 crc kubenswrapper[4919]: E0930 20:27:03.113282 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist podName:2b1754b1-e5ff-4053-bd08-09773a42d4eb nodeName:}" failed. No retries permitted until 2025-09-30 20:27:05.113267699 +0000 UTC m=+810.229300826 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist") pod "speaker-zqj5g" (UID: "2b1754b1-e5ff-4053-bd08-09773a42d4eb") : secret "metallb-memberlist" not found Sep 30 20:27:03 crc kubenswrapper[4919]: I0930 20:27:03.121699 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5de2da45-8775-4784-9c80-810c6713751e-metrics-certs\") pod \"controller-5d688f5ffc-th6f6\" (UID: \"5de2da45-8775-4784-9c80-810c6713751e\") " pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:03 crc kubenswrapper[4919]: I0930 20:27:03.174065 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:03 crc kubenswrapper[4919]: I0930 20:27:03.530103 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-th6f6"] Sep 30 20:27:03 crc kubenswrapper[4919]: I0930 20:27:03.670195 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-th6f6" event={"ID":"5de2da45-8775-4784-9c80-810c6713751e","Type":"ContainerStarted","Data":"b8f644cec619f7746ed03d5f196fcb0ebe88fa2c41b43129161f51a0fcb09f84"} Sep 30 20:27:04 crc kubenswrapper[4919]: I0930 20:27:04.687965 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-th6f6" event={"ID":"5de2da45-8775-4784-9c80-810c6713751e","Type":"ContainerStarted","Data":"7ac79c06b2cae1ecd3039424e2d7816f396ed1da8bf759efcd8eba8ba973e9d9"} Sep 30 20:27:04 crc kubenswrapper[4919]: I0930 20:27:04.688334 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-th6f6" event={"ID":"5de2da45-8775-4784-9c80-810c6713751e","Type":"ContainerStarted","Data":"e1946aa26bb40aae4a72480f3ba79f2f8e5ceea07bc180af9537cb84d5047bbc"} Sep 30 20:27:04 crc kubenswrapper[4919]: I0930 20:27:04.689298 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:04 crc kubenswrapper[4919]: I0930 20:27:04.717283 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-th6f6" podStartSLOduration=3.717264503 podStartE2EDuration="3.717264503s" podCreationTimestamp="2025-09-30 20:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:27:04.714242095 +0000 UTC m=+809.830275242" watchObservedRunningTime="2025-09-30 20:27:04.717264503 +0000 UTC m=+809.833297630" Sep 30 20:27:05 crc kubenswrapper[4919]: I0930 20:27:05.152243 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:05 crc kubenswrapper[4919]: I0930 20:27:05.158456 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/2b1754b1-e5ff-4053-bd08-09773a42d4eb-memberlist\") pod \"speaker-zqj5g\" (UID: \"2b1754b1-e5ff-4053-bd08-09773a42d4eb\") " pod="metallb-system/speaker-zqj5g" Sep 30 20:27:05 crc kubenswrapper[4919]: I0930 20:27:05.261752 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-zqj5g" Sep 30 20:27:05 crc kubenswrapper[4919]: I0930 20:27:05.698368 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zqj5g" event={"ID":"2b1754b1-e5ff-4053-bd08-09773a42d4eb","Type":"ContainerStarted","Data":"85e1c43f8dfc93a1ab4cee05d148c08ce0a24278698f6a0b87d15785c611cd0e"} Sep 30 20:27:05 crc kubenswrapper[4919]: I0930 20:27:05.698675 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zqj5g" event={"ID":"2b1754b1-e5ff-4053-bd08-09773a42d4eb","Type":"ContainerStarted","Data":"ac32f4204f9f67eb13fc8e2b0956a059ead488c48f3b772b506d150feeba459e"} Sep 30 20:27:06 crc kubenswrapper[4919]: I0930 20:27:06.712025 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zqj5g" event={"ID":"2b1754b1-e5ff-4053-bd08-09773a42d4eb","Type":"ContainerStarted","Data":"df4c67e41322d100b9642f349401e91f91fb12df3cc08a95e0577bfa56a680f7"} Sep 30 20:27:07 crc kubenswrapper[4919]: I0930 20:27:07.719631 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-zqj5g" Sep 30 20:27:09 crc kubenswrapper[4919]: I0930 20:27:09.737429 4919 generic.go:334] "Generic (PLEG): container finished" podID="7fdc3545-02de-4073-b40a-249a1a858d3c" containerID="532dbadd113b79aad3fe391223f91bc225f6276d1ffa069e97247bf3586a2110" exitCode=0 Sep 30 20:27:09 crc kubenswrapper[4919]: I0930 20:27:09.737566 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerDied","Data":"532dbadd113b79aad3fe391223f91bc225f6276d1ffa069e97247bf3586a2110"} Sep 30 20:27:09 crc kubenswrapper[4919]: I0930 20:27:09.740512 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" event={"ID":"07ab26d9-dc69-47bf-9d23-b0f94cf42749","Type":"ContainerStarted","Data":"767db0dc8eca77e80beeef86d6b8711b5dd56168f77b3bcdc981edd0bb5eba1f"} Sep 30 20:27:09 crc kubenswrapper[4919]: I0930 20:27:09.740799 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:09 crc kubenswrapper[4919]: I0930 20:27:09.762081 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-zqj5g" podStartSLOduration=8.762052223 podStartE2EDuration="8.762052223s" podCreationTimestamp="2025-09-30 20:27:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:27:06.750764511 +0000 UTC m=+811.866797658" watchObservedRunningTime="2025-09-30 20:27:09.762052223 +0000 UTC m=+814.878085370" Sep 30 20:27:10 crc kubenswrapper[4919]: I0930 20:27:10.754208 4919 generic.go:334] "Generic (PLEG): container finished" podID="7fdc3545-02de-4073-b40a-249a1a858d3c" containerID="56a6a8b6507af734a2d12fdcb946b3bb418636b2e953642af71c393f98e7eaad" exitCode=0 Sep 30 20:27:10 crc kubenswrapper[4919]: I0930 20:27:10.754462 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerDied","Data":"56a6a8b6507af734a2d12fdcb946b3bb418636b2e953642af71c393f98e7eaad"} Sep 30 20:27:10 crc kubenswrapper[4919]: I0930 20:27:10.795378 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" 
podStartSLOduration=2.643880356 podStartE2EDuration="9.795360268s" podCreationTimestamp="2025-09-30 20:27:01 +0000 UTC" firstStartedPulling="2025-09-30 20:27:02.050317536 +0000 UTC m=+807.166350663" lastFinishedPulling="2025-09-30 20:27:09.201797448 +0000 UTC m=+814.317830575" observedRunningTime="2025-09-30 20:27:09.778576822 +0000 UTC m=+814.894609989" watchObservedRunningTime="2025-09-30 20:27:10.795360268 +0000 UTC m=+815.911393415" Sep 30 20:27:11 crc kubenswrapper[4919]: I0930 20:27:11.764202 4919 generic.go:334] "Generic (PLEG): container finished" podID="7fdc3545-02de-4073-b40a-249a1a858d3c" containerID="0b09da352b2505e1e3be1d0042777dbd5eb93590e9e8a9af032fdc3d78a09421" exitCode=0 Sep 30 20:27:11 crc kubenswrapper[4919]: I0930 20:27:11.764272 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerDied","Data":"0b09da352b2505e1e3be1d0042777dbd5eb93590e9e8a9af032fdc3d78a09421"} Sep 30 20:27:12 crc kubenswrapper[4919]: I0930 20:27:12.776411 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerStarted","Data":"9771d1acee32969bd3a742872044bbfd1576dc3f2bb721b8e166050b5bb42de1"} Sep 30 20:27:12 crc kubenswrapper[4919]: I0930 20:27:12.777037 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerStarted","Data":"2e8cf2ed397a26d4a4945388925da981a1d07b3956d479009ed5a3221ebb6f99"} Sep 30 20:27:12 crc kubenswrapper[4919]: I0930 20:27:12.777071 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerStarted","Data":"c3b5f22b6407a2bc79aa57f76da8ecf82e72719d1778041e379dffa90008a264"} Sep 30 20:27:12 crc kubenswrapper[4919]: I0930 20:27:12.777090 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerStarted","Data":"95195d908cc446ec68ac1a635de954e95fdf2463ebc797d249be638dc371b0e0"} Sep 30 20:27:12 crc kubenswrapper[4919]: I0930 20:27:12.777105 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerStarted","Data":"9aa0a73b95af3671a9e987f8da52525c0f2be3872979f0b043236289dff3a63c"} Sep 30 20:27:12 crc kubenswrapper[4919]: I0930 20:27:12.777118 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-j8cms" event={"ID":"7fdc3545-02de-4073-b40a-249a1a858d3c","Type":"ContainerStarted","Data":"52200412d2bcd543b0f083515c56a7028bac47faa0428c138ebbf9dab9418180"} Sep 30 20:27:12 crc kubenswrapper[4919]: I0930 20:27:12.777150 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:12 crc kubenswrapper[4919]: I0930 20:27:12.808526 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-j8cms" podStartSLOduration=4.381094764 podStartE2EDuration="11.808507167s" podCreationTimestamp="2025-09-30 20:27:01 +0000 UTC" firstStartedPulling="2025-09-30 20:27:01.78768629 +0000 UTC m=+806.903719417" lastFinishedPulling="2025-09-30 20:27:09.215098683 +0000 UTC m=+814.331131820" observedRunningTime="2025-09-30 20:27:12.806909581 +0000 UTC m=+817.922942718" 
watchObservedRunningTime="2025-09-30 20:27:12.808507167 +0000 UTC m=+817.924540304" Sep 30 20:27:13 crc kubenswrapper[4919]: I0930 20:27:13.177955 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-th6f6" Sep 30 20:27:15 crc kubenswrapper[4919]: I0930 20:27:15.267738 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-zqj5g" Sep 30 20:27:16 crc kubenswrapper[4919]: I0930 20:27:16.596620 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:16 crc kubenswrapper[4919]: I0930 20:27:16.674300 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.517525 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-hl6st"] Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.519249 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-hl6st" Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.521908 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-qlmv7" Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.522576 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.523577 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.542648 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-hl6st"] Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.631856 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-578f4\" (UniqueName: \"kubernetes.io/projected/6f109beb-213c-401e-ac6a-51e1582990c9-kube-api-access-578f4\") pod \"openstack-operator-index-hl6st\" (UID: \"6f109beb-213c-401e-ac6a-51e1582990c9\") " pod="openstack-operators/openstack-operator-index-hl6st" Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.733388 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-578f4\" (UniqueName: \"kubernetes.io/projected/6f109beb-213c-401e-ac6a-51e1582990c9-kube-api-access-578f4\") pod \"openstack-operator-index-hl6st\" (UID: \"6f109beb-213c-401e-ac6a-51e1582990c9\") " pod="openstack-operators/openstack-operator-index-hl6st" Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.751927 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-578f4\" (UniqueName: \"kubernetes.io/projected/6f109beb-213c-401e-ac6a-51e1582990c9-kube-api-access-578f4\") pod \"openstack-operator-index-hl6st\" (UID: \"6f109beb-213c-401e-ac6a-51e1582990c9\") " pod="openstack-operators/openstack-operator-index-hl6st" Sep 30 20:27:18 crc kubenswrapper[4919]: I0930 20:27:18.841585 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-hl6st" Sep 30 20:27:19 crc kubenswrapper[4919]: I0930 20:27:19.263529 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-hl6st"] Sep 30 20:27:19 crc kubenswrapper[4919]: I0930 20:27:19.829484 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-hl6st" event={"ID":"6f109beb-213c-401e-ac6a-51e1582990c9","Type":"ContainerStarted","Data":"2d2fb37682dc0423ddbe110d5daf2eec838b2498ef27616fc2545beffd04d8dd"} Sep 30 20:27:21 crc kubenswrapper[4919]: I0930 20:27:21.601179 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-j8cms" Sep 30 20:27:21 crc kubenswrapper[4919]: I0930 20:27:21.644027 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-dgcsl" Sep 30 20:27:21 crc kubenswrapper[4919]: I0930 20:27:21.866722 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-hl6st"] Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.479418 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-6djkx"] Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.480613 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.494004 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6djkx"] Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.615055 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9cqm\" (UniqueName: \"kubernetes.io/projected/af135c02-a48d-4046-9412-120ee15f6ea3-kube-api-access-w9cqm\") pod \"openstack-operator-index-6djkx\" (UID: \"af135c02-a48d-4046-9412-120ee15f6ea3\") " pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.716405 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9cqm\" (UniqueName: \"kubernetes.io/projected/af135c02-a48d-4046-9412-120ee15f6ea3-kube-api-access-w9cqm\") pod \"openstack-operator-index-6djkx\" (UID: \"af135c02-a48d-4046-9412-120ee15f6ea3\") " pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.743332 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9cqm\" (UniqueName: \"kubernetes.io/projected/af135c02-a48d-4046-9412-120ee15f6ea3-kube-api-access-w9cqm\") pod \"openstack-operator-index-6djkx\" (UID: \"af135c02-a48d-4046-9412-120ee15f6ea3\") " pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.813559 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.863134 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-hl6st" event={"ID":"6f109beb-213c-401e-ac6a-51e1582990c9","Type":"ContainerStarted","Data":"d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e"} Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.863455 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-hl6st" podUID="6f109beb-213c-401e-ac6a-51e1582990c9" containerName="registry-server" containerID="cri-o://d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e" gracePeriod=2 Sep 30 20:27:22 crc kubenswrapper[4919]: I0930 20:27:22.898379 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-hl6st" podStartSLOduration=1.7326396960000001 podStartE2EDuration="4.898348857s" podCreationTimestamp="2025-09-30 20:27:18 +0000 UTC" firstStartedPulling="2025-09-30 20:27:19.275331245 +0000 UTC m=+824.391364402" lastFinishedPulling="2025-09-30 20:27:22.441040396 +0000 UTC m=+827.557073563" observedRunningTime="2025-09-30 20:27:22.891594081 +0000 UTC m=+828.007627258" watchObservedRunningTime="2025-09-30 20:27:22.898348857 +0000 UTC m=+828.014382014" Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.261093 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-hl6st" Sep 30 20:27:23 crc kubenswrapper[4919]: W0930 20:27:23.293847 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf135c02_a48d_4046_9412_120ee15f6ea3.slice/crio-d280f1b58fb48788f63bbec4773dfffece37b348696da66bbd229d436918df88 WatchSource:0}: Error finding container d280f1b58fb48788f63bbec4773dfffece37b348696da66bbd229d436918df88: Status 404 returned error can't find the container with id d280f1b58fb48788f63bbec4773dfffece37b348696da66bbd229d436918df88 Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.296110 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6djkx"] Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.426912 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-578f4\" (UniqueName: \"kubernetes.io/projected/6f109beb-213c-401e-ac6a-51e1582990c9-kube-api-access-578f4\") pod \"6f109beb-213c-401e-ac6a-51e1582990c9\" (UID: \"6f109beb-213c-401e-ac6a-51e1582990c9\") " Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.432736 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f109beb-213c-401e-ac6a-51e1582990c9-kube-api-access-578f4" (OuterVolumeSpecName: "kube-api-access-578f4") pod "6f109beb-213c-401e-ac6a-51e1582990c9" (UID: "6f109beb-213c-401e-ac6a-51e1582990c9"). InnerVolumeSpecName "kube-api-access-578f4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.528201 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-578f4\" (UniqueName: \"kubernetes.io/projected/6f109beb-213c-401e-ac6a-51e1582990c9-kube-api-access-578f4\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.869600 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6djkx" event={"ID":"af135c02-a48d-4046-9412-120ee15f6ea3","Type":"ContainerStarted","Data":"199e26ea15adaa4d61d09dad50e99657adf28cc5166f38c9d0d07b385525b0ab"} Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.869646 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6djkx" event={"ID":"af135c02-a48d-4046-9412-120ee15f6ea3","Type":"ContainerStarted","Data":"d280f1b58fb48788f63bbec4773dfffece37b348696da66bbd229d436918df88"} Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.871780 4919 generic.go:334] "Generic (PLEG): container finished" podID="6f109beb-213c-401e-ac6a-51e1582990c9" containerID="d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e" exitCode=0 Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.871830 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-hl6st" event={"ID":"6f109beb-213c-401e-ac6a-51e1582990c9","Type":"ContainerDied","Data":"d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e"} Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.871853 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-hl6st" Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.871876 4919 scope.go:117] "RemoveContainer" containerID="d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e" Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.871863 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-hl6st" event={"ID":"6f109beb-213c-401e-ac6a-51e1582990c9","Type":"ContainerDied","Data":"2d2fb37682dc0423ddbe110d5daf2eec838b2498ef27616fc2545beffd04d8dd"} Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.891118 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-6djkx" podStartSLOduration=1.835933075 podStartE2EDuration="1.891095185s" podCreationTimestamp="2025-09-30 20:27:22 +0000 UTC" firstStartedPulling="2025-09-30 20:27:23.298102429 +0000 UTC m=+828.414135566" lastFinishedPulling="2025-09-30 20:27:23.353264549 +0000 UTC m=+828.469297676" observedRunningTime="2025-09-30 20:27:23.889028805 +0000 UTC m=+829.005061982" watchObservedRunningTime="2025-09-30 20:27:23.891095185 +0000 UTC m=+829.007128312" Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.892448 4919 scope.go:117] "RemoveContainer" containerID="d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e" Sep 30 20:27:23 crc kubenswrapper[4919]: E0930 20:27:23.894637 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e\": container with ID starting with d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e not found: ID does not exist" containerID="d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e" 
Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.894844 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e"} err="failed to get container status \"d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e\": rpc error: code = NotFound desc = could not find container \"d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e\": container with ID starting with d03bd45fa33d245c8aad83b6da70b02b9ba8802e870f107a766bd89804d1248e not found: ID does not exist" Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.905852 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-hl6st"] Sep 30 20:27:23 crc kubenswrapper[4919]: I0930 20:27:23.916766 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-hl6st"] Sep 30 20:27:25 crc kubenswrapper[4919]: I0930 20:27:25.654930 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f109beb-213c-401e-ac6a-51e1582990c9" path="/var/lib/kubelet/pods/6f109beb-213c-401e-ac6a-51e1582990c9/volumes" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.481850 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dflt6"] Sep 30 20:27:27 crc kubenswrapper[4919]: E0930 20:27:27.482520 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f109beb-213c-401e-ac6a-51e1582990c9" containerName="registry-server" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.482535 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f109beb-213c-401e-ac6a-51e1582990c9" containerName="registry-server" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.482671 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f109beb-213c-401e-ac6a-51e1582990c9" containerName="registry-server" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.483759 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.509583 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dflt6"] Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.591073 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7rcf\" (UniqueName: \"kubernetes.io/projected/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-kube-api-access-d7rcf\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.591156 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-utilities\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.591370 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-catalog-content\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.692546 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7rcf\" (UniqueName: \"kubernetes.io/projected/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-kube-api-access-d7rcf\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.692618 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-utilities\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.692701 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-catalog-content\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.693536 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-utilities\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.693696 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-catalog-content\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.719986 4919 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d7rcf\" (UniqueName: \"kubernetes.io/projected/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-kube-api-access-d7rcf\") pod \"certified-operators-dflt6\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:27 crc kubenswrapper[4919]: I0930 20:27:27.845901 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:28 crc kubenswrapper[4919]: I0930 20:27:28.309564 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dflt6"] Sep 30 20:27:28 crc kubenswrapper[4919]: I0930 20:27:28.925539 4919 generic.go:334] "Generic (PLEG): container finished" podID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerID="58dcf7db93dfafb0d0321bdefcb09fbbd618205df24128b2b3157f442f7b1738" exitCode=0 Sep 30 20:27:28 crc kubenswrapper[4919]: I0930 20:27:28.925601 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dflt6" event={"ID":"8c680ba2-424a-4e61-8a19-6f3c9f4bd371","Type":"ContainerDied","Data":"58dcf7db93dfafb0d0321bdefcb09fbbd618205df24128b2b3157f442f7b1738"} Sep 30 20:27:28 crc kubenswrapper[4919]: I0930 20:27:28.925645 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dflt6" event={"ID":"8c680ba2-424a-4e61-8a19-6f3c9f4bd371","Type":"ContainerStarted","Data":"ba69a13a3a0c436e495bbc635d390d77fd104234d6fe4122fb5c6bd0aaaab1d4"} Sep 30 20:27:29 crc kubenswrapper[4919]: I0930 20:27:29.934575 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dflt6" event={"ID":"8c680ba2-424a-4e61-8a19-6f3c9f4bd371","Type":"ContainerStarted","Data":"c0a54cb94cce71b8d3539cb1b62ac165514362a90f89f562a07be38b2306488c"} Sep 30 20:27:30 crc kubenswrapper[4919]: I0930 20:27:30.947346 4919 generic.go:334] "Generic (PLEG): container finished" podID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerID="c0a54cb94cce71b8d3539cb1b62ac165514362a90f89f562a07be38b2306488c" exitCode=0 Sep 30 20:27:30 crc kubenswrapper[4919]: I0930 20:27:30.947452 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dflt6" event={"ID":"8c680ba2-424a-4e61-8a19-6f3c9f4bd371","Type":"ContainerDied","Data":"c0a54cb94cce71b8d3539cb1b62ac165514362a90f89f562a07be38b2306488c"} Sep 30 20:27:31 crc kubenswrapper[4919]: I0930 20:27:31.955361 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dflt6" event={"ID":"8c680ba2-424a-4e61-8a19-6f3c9f4bd371","Type":"ContainerStarted","Data":"c27e5fa025d6878077ffc56816bc4c92e5f48050f816d266c5bfe04d562921d5"} Sep 30 20:27:31 crc kubenswrapper[4919]: I0930 20:27:31.974687 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dflt6" podStartSLOduration=2.521407616 podStartE2EDuration="4.974663416s" podCreationTimestamp="2025-09-30 20:27:27 +0000 UTC" firstStartedPulling="2025-09-30 20:27:28.928515643 +0000 UTC m=+834.044548810" lastFinishedPulling="2025-09-30 20:27:31.381771483 +0000 UTC m=+836.497804610" observedRunningTime="2025-09-30 20:27:31.970395332 +0000 UTC m=+837.086428469" watchObservedRunningTime="2025-09-30 20:27:31.974663416 +0000 UTC m=+837.090696543" Sep 30 20:27:32 crc kubenswrapper[4919]: I0930 20:27:32.813998 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:32 crc kubenswrapper[4919]: I0930 20:27:32.814102 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:32 crc kubenswrapper[4919]: I0930 20:27:32.856509 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.003819 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-6djkx" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.477937 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l778p"] Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.480596 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.497002 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l778p"] Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.581677 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-utilities\") pod \"community-operators-l778p\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.581729 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-catalog-content\") pod \"community-operators-l778p\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.581765 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg24f\" (UniqueName: \"kubernetes.io/projected/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-kube-api-access-fg24f\") pod \"community-operators-l778p\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.682971 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-utilities\") pod \"community-operators-l778p\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.683015 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-catalog-content\") pod \"community-operators-l778p\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.683041 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg24f\" (UniqueName: \"kubernetes.io/projected/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-kube-api-access-fg24f\") pod \"community-operators-l778p\" (UID: 
\"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.684286 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-catalog-content\") pod \"community-operators-l778p\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.684430 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-utilities\") pod \"community-operators-l778p\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.709878 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg24f\" (UniqueName: \"kubernetes.io/projected/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-kube-api-access-fg24f\") pod \"community-operators-l778p\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:33 crc kubenswrapper[4919]: I0930 20:27:33.811149 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:34 crc kubenswrapper[4919]: I0930 20:27:34.308656 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l778p"] Sep 30 20:27:34 crc kubenswrapper[4919]: W0930 20:27:34.315068 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf630b267_4afe_4b2b_a0a9_7d6aef5e0fad.slice/crio-ef6185f8b55972fa8c0f7db1a702fcd3001b9ad7c5f0beb5576e7a12e2603977 WatchSource:0}: Error finding container ef6185f8b55972fa8c0f7db1a702fcd3001b9ad7c5f0beb5576e7a12e2603977: Status 404 returned error can't find the container with id ef6185f8b55972fa8c0f7db1a702fcd3001b9ad7c5f0beb5576e7a12e2603977 Sep 30 20:27:34 crc kubenswrapper[4919]: I0930 20:27:34.982365 4919 generic.go:334] "Generic (PLEG): container finished" podID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerID="90af94a93598a67e4a7064f64298d4f282c4977d2a4ed9a6a29510fa7360150f" exitCode=0 Sep 30 20:27:34 crc kubenswrapper[4919]: I0930 20:27:34.982561 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l778p" event={"ID":"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad","Type":"ContainerDied","Data":"90af94a93598a67e4a7064f64298d4f282c4977d2a4ed9a6a29510fa7360150f"} Sep 30 20:27:34 crc kubenswrapper[4919]: I0930 20:27:34.982863 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l778p" event={"ID":"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad","Type":"ContainerStarted","Data":"ef6185f8b55972fa8c0f7db1a702fcd3001b9ad7c5f0beb5576e7a12e2603977"} Sep 30 20:27:35 crc kubenswrapper[4919]: I0930 20:27:35.997184 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l778p" event={"ID":"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad","Type":"ContainerStarted","Data":"04ce7d1265f3546484be0e8f022e5ce65067446506b43e794ef382cb0df03032"} Sep 30 20:27:37 crc kubenswrapper[4919]: I0930 20:27:37.011604 4919 generic.go:334] "Generic (PLEG): container finished" 
podID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerID="04ce7d1265f3546484be0e8f022e5ce65067446506b43e794ef382cb0df03032" exitCode=0 Sep 30 20:27:37 crc kubenswrapper[4919]: I0930 20:27:37.012697 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l778p" event={"ID":"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad","Type":"ContainerDied","Data":"04ce7d1265f3546484be0e8f022e5ce65067446506b43e794ef382cb0df03032"} Sep 30 20:27:37 crc kubenswrapper[4919]: I0930 20:27:37.846495 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:37 crc kubenswrapper[4919]: I0930 20:27:37.846870 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:37 crc kubenswrapper[4919]: I0930 20:27:37.907052 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:38 crc kubenswrapper[4919]: I0930 20:27:38.022727 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l778p" event={"ID":"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad","Type":"ContainerStarted","Data":"ed50a419e60c404046cdc01377daee59c9729cc10b218e6329e7abd606f11b3c"} Sep 30 20:27:38 crc kubenswrapper[4919]: I0930 20:27:38.051583 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l778p" podStartSLOduration=2.422785667 podStartE2EDuration="5.051555979s" podCreationTimestamp="2025-09-30 20:27:33 +0000 UTC" firstStartedPulling="2025-09-30 20:27:34.985787254 +0000 UTC m=+840.101820391" lastFinishedPulling="2025-09-30 20:27:37.614557556 +0000 UTC m=+842.730590703" observedRunningTime="2025-09-30 20:27:38.042933959 +0000 UTC m=+843.158967096" watchObservedRunningTime="2025-09-30 20:27:38.051555979 +0000 UTC m=+843.167589116" Sep 30 20:27:38 crc kubenswrapper[4919]: I0930 20:27:38.088313 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.139059 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd"] Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.140763 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: W0930 20:27:40.142364 4919 reflector.go:561] object-"openstack-operators"/"default-dockercfg-8z9w5": failed to list *v1.Secret: secrets "default-dockercfg-8z9w5" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Sep 30 20:27:40 crc kubenswrapper[4919]: E0930 20:27:40.142411 4919 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"default-dockercfg-8z9w5\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"default-dockercfg-8z9w5\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.153046 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd"] Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.189569 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgn4l\" (UniqueName: \"kubernetes.io/projected/78f527bb-835b-438b-a68f-bc7a4ffc921f-kube-api-access-xgn4l\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.189866 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-bundle\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.189922 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-util\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.291349 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgn4l\" (UniqueName: \"kubernetes.io/projected/78f527bb-835b-438b-a68f-bc7a4ffc921f-kube-api-access-xgn4l\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.291458 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-bundle\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " 
pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.291480 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-util\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.292132 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-util\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.292125 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-bundle\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:40 crc kubenswrapper[4919]: I0930 20:27:40.309336 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgn4l\" (UniqueName: \"kubernetes.io/projected/78f527bb-835b-438b-a68f-bc7a4ffc921f-kube-api-access-xgn4l\") pod \"0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:41 crc kubenswrapper[4919]: I0930 20:27:41.456786 4919 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" secret="" err="failed to sync secret cache: timed out waiting for the condition" Sep 30 20:27:41 crc kubenswrapper[4919]: I0930 20:27:41.456862 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:41 crc kubenswrapper[4919]: I0930 20:27:41.469693 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dflt6"] Sep 30 20:27:41 crc kubenswrapper[4919]: I0930 20:27:41.470284 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dflt6" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerName="registry-server" containerID="cri-o://c27e5fa025d6878077ffc56816bc4c92e5f48050f816d266c5bfe04d562921d5" gracePeriod=2 Sep 30 20:27:41 crc kubenswrapper[4919]: I0930 20:27:41.520797 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-8z9w5" Sep 30 20:27:41 crc kubenswrapper[4919]: I0930 20:27:41.899509 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd"] Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.050354 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" event={"ID":"78f527bb-835b-438b-a68f-bc7a4ffc921f","Type":"ContainerStarted","Data":"075f59ca1819cb1150d6711084090021db36509aa648b57ac8a46dcf6686a631"} Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.050409 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" event={"ID":"78f527bb-835b-438b-a68f-bc7a4ffc921f","Type":"ContainerStarted","Data":"67cb0638a0ff50dce7cf09139edfbe432aa898d5a86e383b1996496d99b693a5"} Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.052503 4919 generic.go:334] "Generic (PLEG): container finished" podID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerID="c27e5fa025d6878077ffc56816bc4c92e5f48050f816d266c5bfe04d562921d5" exitCode=0 Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.052531 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dflt6" event={"ID":"8c680ba2-424a-4e61-8a19-6f3c9f4bd371","Type":"ContainerDied","Data":"c27e5fa025d6878077ffc56816bc4c92e5f48050f816d266c5bfe04d562921d5"} Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.311513 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.429315 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7rcf\" (UniqueName: \"kubernetes.io/projected/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-kube-api-access-d7rcf\") pod \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.429398 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-catalog-content\") pod \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.429514 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-utilities\") pod \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\" (UID: \"8c680ba2-424a-4e61-8a19-6f3c9f4bd371\") " Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.430330 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-utilities" (OuterVolumeSpecName: "utilities") pod "8c680ba2-424a-4e61-8a19-6f3c9f4bd371" (UID: "8c680ba2-424a-4e61-8a19-6f3c9f4bd371"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.435859 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-kube-api-access-d7rcf" (OuterVolumeSpecName: "kube-api-access-d7rcf") pod "8c680ba2-424a-4e61-8a19-6f3c9f4bd371" (UID: "8c680ba2-424a-4e61-8a19-6f3c9f4bd371"). InnerVolumeSpecName "kube-api-access-d7rcf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.480624 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c680ba2-424a-4e61-8a19-6f3c9f4bd371" (UID: "8c680ba2-424a-4e61-8a19-6f3c9f4bd371"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.531737 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7rcf\" (UniqueName: \"kubernetes.io/projected/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-kube-api-access-d7rcf\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.531774 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:42 crc kubenswrapper[4919]: I0930 20:27:42.531787 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c680ba2-424a-4e61-8a19-6f3c9f4bd371-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.061646 4919 generic.go:334] "Generic (PLEG): container finished" podID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerID="075f59ca1819cb1150d6711084090021db36509aa648b57ac8a46dcf6686a631" exitCode=0 Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.061836 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" event={"ID":"78f527bb-835b-438b-a68f-bc7a4ffc921f","Type":"ContainerDied","Data":"075f59ca1819cb1150d6711084090021db36509aa648b57ac8a46dcf6686a631"} Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.066772 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dflt6" event={"ID":"8c680ba2-424a-4e61-8a19-6f3c9f4bd371","Type":"ContainerDied","Data":"ba69a13a3a0c436e495bbc635d390d77fd104234d6fe4122fb5c6bd0aaaab1d4"} Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.066851 4919 scope.go:117] "RemoveContainer" containerID="c27e5fa025d6878077ffc56816bc4c92e5f48050f816d266c5bfe04d562921d5" Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.066878 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dflt6" Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.094019 4919 scope.go:117] "RemoveContainer" containerID="c0a54cb94cce71b8d3539cb1b62ac165514362a90f89f562a07be38b2306488c" Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.115777 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dflt6"] Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.122321 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dflt6"] Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.137270 4919 scope.go:117] "RemoveContainer" containerID="58dcf7db93dfafb0d0321bdefcb09fbbd618205df24128b2b3157f442f7b1738" Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.646114 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" path="/var/lib/kubelet/pods/8c680ba2-424a-4e61-8a19-6f3c9f4bd371/volumes" Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.812421 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.812494 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:43 crc kubenswrapper[4919]: I0930 20:27:43.884972 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:44 crc kubenswrapper[4919]: I0930 20:27:44.074256 4919 generic.go:334] "Generic (PLEG): container finished" podID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerID="be402b22345a26f7e64dc61656f6978bc5866632a790b6a84b5d0422ca03e89f" exitCode=0 Sep 30 20:27:44 crc kubenswrapper[4919]: I0930 20:27:44.074350 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" event={"ID":"78f527bb-835b-438b-a68f-bc7a4ffc921f","Type":"ContainerDied","Data":"be402b22345a26f7e64dc61656f6978bc5866632a790b6a84b5d0422ca03e89f"} Sep 30 20:27:44 crc kubenswrapper[4919]: I0930 20:27:44.155561 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:45 crc kubenswrapper[4919]: I0930 20:27:45.086506 4919 generic.go:334] "Generic (PLEG): container finished" podID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerID="ae2b063e88927a5297dbb465d246220e3013498f56a489e66e3ef07e80770054" exitCode=0 Sep 30 20:27:45 crc kubenswrapper[4919]: I0930 20:27:45.086584 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" event={"ID":"78f527bb-835b-438b-a68f-bc7a4ffc921f","Type":"ContainerDied","Data":"ae2b063e88927a5297dbb465d246220e3013498f56a489e66e3ef07e80770054"} Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.496567 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.600691 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgn4l\" (UniqueName: \"kubernetes.io/projected/78f527bb-835b-438b-a68f-bc7a4ffc921f-kube-api-access-xgn4l\") pod \"78f527bb-835b-438b-a68f-bc7a4ffc921f\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.600736 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-util\") pod \"78f527bb-835b-438b-a68f-bc7a4ffc921f\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.600795 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-bundle\") pod \"78f527bb-835b-438b-a68f-bc7a4ffc921f\" (UID: \"78f527bb-835b-438b-a68f-bc7a4ffc921f\") " Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.601421 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-bundle" (OuterVolumeSpecName: "bundle") pod "78f527bb-835b-438b-a68f-bc7a4ffc921f" (UID: "78f527bb-835b-438b-a68f-bc7a4ffc921f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.614291 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-util" (OuterVolumeSpecName: "util") pod "78f527bb-835b-438b-a68f-bc7a4ffc921f" (UID: "78f527bb-835b-438b-a68f-bc7a4ffc921f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.615686 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78f527bb-835b-438b-a68f-bc7a4ffc921f-kube-api-access-xgn4l" (OuterVolumeSpecName: "kube-api-access-xgn4l") pod "78f527bb-835b-438b-a68f-bc7a4ffc921f" (UID: "78f527bb-835b-438b-a68f-bc7a4ffc921f"). InnerVolumeSpecName "kube-api-access-xgn4l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.702297 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgn4l\" (UniqueName: \"kubernetes.io/projected/78f527bb-835b-438b-a68f-bc7a4ffc921f-kube-api-access-xgn4l\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.702543 4919 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-util\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:46 crc kubenswrapper[4919]: I0930 20:27:46.702557 4919 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/78f527bb-835b-438b-a68f-bc7a4ffc921f-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:47 crc kubenswrapper[4919]: I0930 20:27:47.106642 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" event={"ID":"78f527bb-835b-438b-a68f-bc7a4ffc921f","Type":"ContainerDied","Data":"67cb0638a0ff50dce7cf09139edfbe432aa898d5a86e383b1996496d99b693a5"} Sep 30 20:27:47 crc kubenswrapper[4919]: I0930 20:27:47.106680 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67cb0638a0ff50dce7cf09139edfbe432aa898d5a86e383b1996496d99b693a5" Sep 30 20:27:47 crc kubenswrapper[4919]: I0930 20:27:47.106752 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd" Sep 30 20:27:47 crc kubenswrapper[4919]: I0930 20:27:47.671624 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l778p"] Sep 30 20:27:47 crc kubenswrapper[4919]: I0930 20:27:47.672753 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l778p" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerName="registry-server" containerID="cri-o://ed50a419e60c404046cdc01377daee59c9729cc10b218e6329e7abd606f11b3c" gracePeriod=2 Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.117059 4919 generic.go:334] "Generic (PLEG): container finished" podID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerID="ed50a419e60c404046cdc01377daee59c9729cc10b218e6329e7abd606f11b3c" exitCode=0 Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.117112 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l778p" event={"ID":"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad","Type":"ContainerDied","Data":"ed50a419e60c404046cdc01377daee59c9729cc10b218e6329e7abd606f11b3c"} Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.117444 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l778p" event={"ID":"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad","Type":"ContainerDied","Data":"ef6185f8b55972fa8c0f7db1a702fcd3001b9ad7c5f0beb5576e7a12e2603977"} Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.117539 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef6185f8b55972fa8c0f7db1a702fcd3001b9ad7c5f0beb5576e7a12e2603977" Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.147905 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.223744 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-catalog-content\") pod \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.223801 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-utilities\") pod \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.223836 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg24f\" (UniqueName: \"kubernetes.io/projected/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-kube-api-access-fg24f\") pod \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\" (UID: \"f630b267-4afe-4b2b-a0a9-7d6aef5e0fad\") " Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.225449 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-utilities" (OuterVolumeSpecName: "utilities") pod "f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" (UID: "f630b267-4afe-4b2b-a0a9-7d6aef5e0fad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.229364 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-kube-api-access-fg24f" (OuterVolumeSpecName: "kube-api-access-fg24f") pod "f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" (UID: "f630b267-4afe-4b2b-a0a9-7d6aef5e0fad"). InnerVolumeSpecName "kube-api-access-fg24f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.272450 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" (UID: "f630b267-4afe-4b2b-a0a9-7d6aef5e0fad"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.325560 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.325624 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg24f\" (UniqueName: \"kubernetes.io/projected/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-kube-api-access-fg24f\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:48 crc kubenswrapper[4919]: I0930 20:27:48.325636 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:27:49 crc kubenswrapper[4919]: I0930 20:27:49.125010 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l778p" Sep 30 20:27:49 crc kubenswrapper[4919]: I0930 20:27:49.190183 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l778p"] Sep 30 20:27:49 crc kubenswrapper[4919]: I0930 20:27:49.194646 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l778p"] Sep 30 20:27:49 crc kubenswrapper[4919]: I0930 20:27:49.640108 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" path="/var/lib/kubelet/pods/f630b267-4afe-4b2b-a0a9-7d6aef5e0fad/volumes" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.059950 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8"] Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.061206 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerName="registry-server" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.061323 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerName="registry-server" Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.061403 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerName="extract-utilities" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.061471 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerName="extract-utilities" Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.061549 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerName="pull" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.061623 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerName="pull" Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.061680 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerName="extract-content" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.061737 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerName="extract-content" Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.061797 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerName="registry-server" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.061846 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerName="registry-server" Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.061897 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerName="extract-utilities" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.061944 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerName="extract-utilities" Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.062003 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerName="util" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.062072 4919 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerName="util" Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.062148 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerName="extract-content" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.062251 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerName="extract-content" Sep 30 20:27:51 crc kubenswrapper[4919]: E0930 20:27:51.062345 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerName="extract" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.062412 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerName="extract" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.062606 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="f630b267-4afe-4b2b-a0a9-7d6aef5e0fad" containerName="registry-server" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.062691 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c680ba2-424a-4e61-8a19-6f3c9f4bd371" containerName="registry-server" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.062761 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="78f527bb-835b-438b-a68f-bc7a4ffc921f" containerName="extract" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.063571 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.065640 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-6m82h" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.087054 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8"] Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.168843 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdr9n\" (UniqueName: \"kubernetes.io/projected/e471c350-4736-4417-8d84-8643b8da1be2-kube-api-access-xdr9n\") pod \"openstack-operator-controller-operator-59b4657894-mfdh8\" (UID: \"e471c350-4736-4417-8d84-8643b8da1be2\") " pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.270655 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdr9n\" (UniqueName: \"kubernetes.io/projected/e471c350-4736-4417-8d84-8643b8da1be2-kube-api-access-xdr9n\") pod \"openstack-operator-controller-operator-59b4657894-mfdh8\" (UID: \"e471c350-4736-4417-8d84-8643b8da1be2\") " pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.300781 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdr9n\" (UniqueName: \"kubernetes.io/projected/e471c350-4736-4417-8d84-8643b8da1be2-kube-api-access-xdr9n\") pod \"openstack-operator-controller-operator-59b4657894-mfdh8\" (UID: \"e471c350-4736-4417-8d84-8643b8da1be2\") " pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.382921 
4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" Sep 30 20:27:51 crc kubenswrapper[4919]: I0930 20:27:51.931468 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8"] Sep 30 20:27:52 crc kubenswrapper[4919]: I0930 20:27:52.144948 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" event={"ID":"e471c350-4736-4417-8d84-8643b8da1be2","Type":"ContainerStarted","Data":"a3c5b40c14ec6f1344452a62e953a91446f8ddca038ba9749503b8bdd2cb5250"} Sep 30 20:27:56 crc kubenswrapper[4919]: I0930 20:27:56.186098 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" event={"ID":"e471c350-4736-4417-8d84-8643b8da1be2","Type":"ContainerStarted","Data":"432273e4a52d6cc1d198315ed1bfc9b223a7269f4530d2b258cae0f62a9c0d16"} Sep 30 20:27:58 crc kubenswrapper[4919]: I0930 20:27:58.201624 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" event={"ID":"e471c350-4736-4417-8d84-8643b8da1be2","Type":"ContainerStarted","Data":"09fa7fbe925b53fd4ba78616be3975b7f400b6cae5367db81fe2366103fa9f3d"} Sep 30 20:27:58 crc kubenswrapper[4919]: I0930 20:27:58.202282 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" Sep 30 20:27:58 crc kubenswrapper[4919]: I0930 20:27:58.242781 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" podStartSLOduration=1.757685324 podStartE2EDuration="7.242761482s" podCreationTimestamp="2025-09-30 20:27:51 +0000 UTC" firstStartedPulling="2025-09-30 20:27:51.945635516 +0000 UTC m=+857.061668643" lastFinishedPulling="2025-09-30 20:27:57.430711684 +0000 UTC m=+862.546744801" observedRunningTime="2025-09-30 20:27:58.241950569 +0000 UTC m=+863.357983696" watchObservedRunningTime="2025-09-30 20:27:58.242761482 +0000 UTC m=+863.358794609" Sep 30 20:28:01 crc kubenswrapper[4919]: I0930 20:28:01.385809 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-59b4657894-mfdh8" Sep 30 20:28:26 crc kubenswrapper[4919]: I0930 20:28:26.062527 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:28:26 crc kubenswrapper[4919]: I0930 20:28:26.063238 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.264273 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.265658 4919 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.267975 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.268732 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.269661 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-xth7c" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.270255 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-84t6s" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.277592 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.281052 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-k6295"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.281930 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.286466 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-zfffx" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.300951 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zjcw\" (UniqueName: \"kubernetes.io/projected/fd8b2eba-9c90-4a16-b470-6e43eaa38f4d-kube-api-access-5zjcw\") pod \"barbican-operator-controller-manager-6ff8b75857-nfpc8\" (UID: \"fd8b2eba-9c90-4a16-b470-6e43eaa38f4d\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.301421 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdrl5\" (UniqueName: \"kubernetes.io/projected/35922b82-d9a9-425b-89e2-919fd9d937dd-kube-api-access-vdrl5\") pod \"cinder-operator-controller-manager-644bddb6d8-7vfvh\" (UID: \"35922b82-d9a9-425b-89e2-919fd9d937dd\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.307822 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.313106 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.314603 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.316736 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-7g4bs" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.324014 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-k6295"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.343561 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.345638 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.347151 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.359242 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-22bxk" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.362598 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.388273 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.389173 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.392911 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-f95ps" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.391907 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.395207 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.401821 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mr89t" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.403905 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l8zf\" (UniqueName: \"kubernetes.io/projected/75189fe6-5b26-4743-b2e2-8e0fee41c653-kube-api-access-6l8zf\") pod \"designate-operator-controller-manager-84f4f7b77b-mjllk\" (UID: \"75189fe6-5b26-4743-b2e2-8e0fee41c653\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.404057 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc9wn\" (UniqueName: \"kubernetes.io/projected/1fbeecba-9bf8-44ef-819b-63bcf26ce691-kube-api-access-tc9wn\") pod \"glance-operator-controller-manager-84958c4d49-k6295\" (UID: \"1fbeecba-9bf8-44ef-819b-63bcf26ce691\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.404170 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d49rz\" (UniqueName: \"kubernetes.io/projected/7fc088d5-3fb5-40a2-b086-c1a4e52a325e-kube-api-access-d49rz\") pod \"heat-operator-controller-manager-5d889d78cf-5dtjx\" (UID: \"7fc088d5-3fb5-40a2-b086-c1a4e52a325e\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.404366 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdrl5\" (UniqueName: \"kubernetes.io/projected/35922b82-d9a9-425b-89e2-919fd9d937dd-kube-api-access-vdrl5\") pod \"cinder-operator-controller-manager-644bddb6d8-7vfvh\" (UID: \"35922b82-d9a9-425b-89e2-919fd9d937dd\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.404513 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zjcw\" (UniqueName: \"kubernetes.io/projected/fd8b2eba-9c90-4a16-b470-6e43eaa38f4d-kube-api-access-5zjcw\") pod \"barbican-operator-controller-manager-6ff8b75857-nfpc8\" (UID: \"fd8b2eba-9c90-4a16-b470-6e43eaa38f4d\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.409324 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.433624 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.453228 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zjcw\" (UniqueName: \"kubernetes.io/projected/fd8b2eba-9c90-4a16-b470-6e43eaa38f4d-kube-api-access-5zjcw\") pod \"barbican-operator-controller-manager-6ff8b75857-nfpc8\" (UID: \"fd8b2eba-9c90-4a16-b470-6e43eaa38f4d\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 
20:28:39.453557 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.457898 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdrl5\" (UniqueName: \"kubernetes.io/projected/35922b82-d9a9-425b-89e2-919fd9d937dd-kube-api-access-vdrl5\") pod \"cinder-operator-controller-manager-644bddb6d8-7vfvh\" (UID: \"35922b82-d9a9-425b-89e2-919fd9d937dd\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.476274 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.477533 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.479924 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-wmjg8" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.482453 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.483656 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.494027 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-5s7bn" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.502962 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.509661 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m94gq\" (UniqueName: \"kubernetes.io/projected/469c99b8-4171-48c7-9091-fbab0c200c11-kube-api-access-m94gq\") pod \"keystone-operator-controller-manager-5bd55b4bff-p9rsr\" (UID: \"469c99b8-4171-48c7-9091-fbab0c200c11\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.509880 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l8zf\" (UniqueName: \"kubernetes.io/projected/75189fe6-5b26-4743-b2e2-8e0fee41c653-kube-api-access-6l8zf\") pod \"designate-operator-controller-manager-84f4f7b77b-mjllk\" (UID: \"75189fe6-5b26-4743-b2e2-8e0fee41c653\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.509960 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc9wn\" (UniqueName: \"kubernetes.io/projected/1fbeecba-9bf8-44ef-819b-63bcf26ce691-kube-api-access-tc9wn\") pod \"glance-operator-controller-manager-84958c4d49-k6295\" (UID: \"1fbeecba-9bf8-44ef-819b-63bcf26ce691\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.510061 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-d49rz\" (UniqueName: \"kubernetes.io/projected/7fc088d5-3fb5-40a2-b086-c1a4e52a325e-kube-api-access-d49rz\") pod \"heat-operator-controller-manager-5d889d78cf-5dtjx\" (UID: \"7fc088d5-3fb5-40a2-b086-c1a4e52a325e\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.510240 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1053b07d-a2f6-4580-8edd-65e680622c9e-cert\") pod \"infra-operator-controller-manager-9d6c5db85-v96qz\" (UID: \"1053b07d-a2f6-4580-8edd-65e680622c9e\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.510549 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6t72\" (UniqueName: \"kubernetes.io/projected/94b17ff0-8f16-4683-8153-a0d8b2b55437-kube-api-access-t6t72\") pod \"horizon-operator-controller-manager-9f4696d94-w2t4d\" (UID: \"94b17ff0-8f16-4683-8153-a0d8b2b55437\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.510630 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r5xl\" (UniqueName: \"kubernetes.io/projected/1053b07d-a2f6-4580-8edd-65e680622c9e-kube-api-access-4r5xl\") pod \"infra-operator-controller-manager-9d6c5db85-v96qz\" (UID: \"1053b07d-a2f6-4580-8edd-65e680622c9e\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.510708 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ql6m\" (UniqueName: \"kubernetes.io/projected/570ac8cc-5b75-4404-9df9-36387db5e5aa-kube-api-access-7ql6m\") pod \"ironic-operator-controller-manager-7975b88857-2w4vp\" (UID: \"570ac8cc-5b75-4404-9df9-36387db5e5aa\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.515279 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.524881 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.525695 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.527517 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-l9bwb" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.560142 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d49rz\" (UniqueName: \"kubernetes.io/projected/7fc088d5-3fb5-40a2-b086-c1a4e52a325e-kube-api-access-d49rz\") pod \"heat-operator-controller-manager-5d889d78cf-5dtjx\" (UID: \"7fc088d5-3fb5-40a2-b086-c1a4e52a325e\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.577517 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc9wn\" (UniqueName: \"kubernetes.io/projected/1fbeecba-9bf8-44ef-819b-63bcf26ce691-kube-api-access-tc9wn\") pod \"glance-operator-controller-manager-84958c4d49-k6295\" (UID: \"1fbeecba-9bf8-44ef-819b-63bcf26ce691\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.589960 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.598818 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.613266 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m94gq\" (UniqueName: \"kubernetes.io/projected/469c99b8-4171-48c7-9091-fbab0c200c11-kube-api-access-m94gq\") pod \"keystone-operator-controller-manager-5bd55b4bff-p9rsr\" (UID: \"469c99b8-4171-48c7-9091-fbab0c200c11\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.613369 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1053b07d-a2f6-4580-8edd-65e680622c9e-cert\") pod \"infra-operator-controller-manager-9d6c5db85-v96qz\" (UID: \"1053b07d-a2f6-4580-8edd-65e680622c9e\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.613407 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6t72\" (UniqueName: \"kubernetes.io/projected/94b17ff0-8f16-4683-8153-a0d8b2b55437-kube-api-access-t6t72\") pod \"horizon-operator-controller-manager-9f4696d94-w2t4d\" (UID: \"94b17ff0-8f16-4683-8153-a0d8b2b55437\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.613432 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftkdr\" (UniqueName: \"kubernetes.io/projected/8b7b2889-ed1b-45b0-909c-011b3fbee825-kube-api-access-ftkdr\") pod \"manila-operator-controller-manager-6d68dbc695-2mrbb\" (UID: \"8b7b2889-ed1b-45b0-909c-011b3fbee825\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.613457 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-4r5xl\" (UniqueName: \"kubernetes.io/projected/1053b07d-a2f6-4580-8edd-65e680622c9e-kube-api-access-4r5xl\") pod \"infra-operator-controller-manager-9d6c5db85-v96qz\" (UID: \"1053b07d-a2f6-4580-8edd-65e680622c9e\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.613483 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ql6m\" (UniqueName: \"kubernetes.io/projected/570ac8cc-5b75-4404-9df9-36387db5e5aa-kube-api-access-7ql6m\") pod \"ironic-operator-controller-manager-7975b88857-2w4vp\" (UID: \"570ac8cc-5b75-4404-9df9-36387db5e5aa\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" Sep 30 20:28:39 crc kubenswrapper[4919]: E0930 20:28:39.613849 4919 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 30 20:28:39 crc kubenswrapper[4919]: E0930 20:28:39.613895 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1053b07d-a2f6-4580-8edd-65e680622c9e-cert podName:1053b07d-a2f6-4580-8edd-65e680622c9e nodeName:}" failed. No retries permitted until 2025-09-30 20:28:40.113875471 +0000 UTC m=+905.229908598 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/1053b07d-a2f6-4580-8edd-65e680622c9e-cert") pod "infra-operator-controller-manager-9d6c5db85-v96qz" (UID: "1053b07d-a2f6-4580-8edd-65e680622c9e") : secret "infra-operator-webhook-server-cert" not found Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.614310 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.632393 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l8zf\" (UniqueName: \"kubernetes.io/projected/75189fe6-5b26-4743-b2e2-8e0fee41c653-kube-api-access-6l8zf\") pod \"designate-operator-controller-manager-84f4f7b77b-mjllk\" (UID: \"75189fe6-5b26-4743-b2e2-8e0fee41c653\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.694618 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.705297 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.708257 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6t72\" (UniqueName: \"kubernetes.io/projected/94b17ff0-8f16-4683-8153-a0d8b2b55437-kube-api-access-t6t72\") pod \"horizon-operator-controller-manager-9f4696d94-w2t4d\" (UID: \"94b17ff0-8f16-4683-8153-a0d8b2b55437\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.709915 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r5xl\" (UniqueName: \"kubernetes.io/projected/1053b07d-a2f6-4580-8edd-65e680622c9e-kube-api-access-4r5xl\") pod \"infra-operator-controller-manager-9d6c5db85-v96qz\" (UID: \"1053b07d-a2f6-4580-8edd-65e680622c9e\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.714904 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftkdr\" (UniqueName: \"kubernetes.io/projected/8b7b2889-ed1b-45b0-909c-011b3fbee825-kube-api-access-ftkdr\") pod \"manila-operator-controller-manager-6d68dbc695-2mrbb\" (UID: \"8b7b2889-ed1b-45b0-909c-011b3fbee825\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.728523 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.730623 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ql6m\" (UniqueName: \"kubernetes.io/projected/570ac8cc-5b75-4404-9df9-36387db5e5aa-kube-api-access-7ql6m\") pod \"ironic-operator-controller-manager-7975b88857-2w4vp\" (UID: \"570ac8cc-5b75-4404-9df9-36387db5e5aa\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.732192 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m94gq\" (UniqueName: \"kubernetes.io/projected/469c99b8-4171-48c7-9091-fbab0c200c11-kube-api-access-m94gq\") pod \"keystone-operator-controller-manager-5bd55b4bff-p9rsr\" (UID: \"469c99b8-4171-48c7-9091-fbab0c200c11\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.732589 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.745683 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.746792 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.751233 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.751444 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl"] Sep 
30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.752250 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.752350 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.747748 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.753683 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.756245 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.756340 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.756406 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.757209 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.759644 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.760861 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-vlpmn" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.764086 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.764397 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-2k59b" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.764131 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.764166 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.771278 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.783766 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.765946 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftkdr\" (UniqueName: \"kubernetes.io/projected/8b7b2889-ed1b-45b0-909c-011b3fbee825-kube-api-access-ftkdr\") pod \"manila-operator-controller-manager-6d68dbc695-2mrbb\" (UID: \"8b7b2889-ed1b-45b0-909c-011b3fbee825\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.786116 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-phkwj" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.786915 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-m5cs5" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.789598 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.789831 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-tqths" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.790050 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-9hkzd" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.815754 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.817071 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.818572 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/25bcaa59-d154-41d1-8f73-92f41da4e3a9-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-qrxkz\" (UID: \"25bcaa59-d154-41d1-8f73-92f41da4e3a9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.818600 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vplf2\" (UniqueName: \"kubernetes.io/projected/25bcaa59-d154-41d1-8f73-92f41da4e3a9-kube-api-access-vplf2\") pod \"openstack-baremetal-operator-controller-manager-6d776955-qrxkz\" (UID: \"25bcaa59-d154-41d1-8f73-92f41da4e3a9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.818695 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9dkg\" (UniqueName: \"kubernetes.io/projected/b1d4f4a6-d94c-4b73-8f95-9378547c5453-kube-api-access-g9dkg\") pod \"octavia-operator-controller-manager-76fcc6dc7c-56jpm\" (UID: \"b1d4f4a6-d94c-4b73-8f95-9378547c5453\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.818730 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2zrl\" (UniqueName: \"kubernetes.io/projected/6ca3a550-cff6-49a7-ae12-43f75f743cb2-kube-api-access-j2zrl\") pod \"neutron-operator-controller-manager-64d7b59854-57rlr\" (UID: \"6ca3a550-cff6-49a7-ae12-43f75f743cb2\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.818752 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fzl2\" (UniqueName: \"kubernetes.io/projected/e5b640a6-b206-4061-95f7-59c09848b709-kube-api-access-7fzl2\") pod \"ovn-operator-controller-manager-9976ff44c-zf7wn\" (UID: \"e5b640a6-b206-4061-95f7-59c09848b709\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.818777 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmrbw\" (UniqueName: \"kubernetes.io/projected/bc2e190b-bcce-456a-938a-4a2cc054a43c-kube-api-access-dmrbw\") pod \"nova-operator-controller-manager-c7c776c96-5sgrl\" (UID: \"bc2e190b-bcce-456a-938a-4a2cc054a43c\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.818811 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79q2x\" (UniqueName: \"kubernetes.io/projected/19fb5b55-7b88-47ff-a4e5-b8995a29db8f-kube-api-access-79q2x\") pod \"mariadb-operator-controller-manager-88c7-d2k8w\" (UID: \"19fb5b55-7b88-47ff-a4e5-b8995a29db8f\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.823867 4919 reflector.go:368] Caches populated for 
*v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-jf4w8" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.828775 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.849955 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.851523 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.855612 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.875047 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-sszw4" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.879985 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.881013 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.893515 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-jvn7s" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.919431 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.924919 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9dkg\" (UniqueName: \"kubernetes.io/projected/b1d4f4a6-d94c-4b73-8f95-9378547c5453-kube-api-access-g9dkg\") pod \"octavia-operator-controller-manager-76fcc6dc7c-56jpm\" (UID: \"b1d4f4a6-d94c-4b73-8f95-9378547c5453\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.927162 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fzl2\" (UniqueName: \"kubernetes.io/projected/e5b640a6-b206-4061-95f7-59c09848b709-kube-api-access-7fzl2\") pod \"ovn-operator-controller-manager-9976ff44c-zf7wn\" (UID: \"e5b640a6-b206-4061-95f7-59c09848b709\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.927190 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2zrl\" (UniqueName: \"kubernetes.io/projected/6ca3a550-cff6-49a7-ae12-43f75f743cb2-kube-api-access-j2zrl\") pod \"neutron-operator-controller-manager-64d7b59854-57rlr\" (UID: \"6ca3a550-cff6-49a7-ae12-43f75f743cb2\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.927235 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmrbw\" (UniqueName: 
\"kubernetes.io/projected/bc2e190b-bcce-456a-938a-4a2cc054a43c-kube-api-access-dmrbw\") pod \"nova-operator-controller-manager-c7c776c96-5sgrl\" (UID: \"bc2e190b-bcce-456a-938a-4a2cc054a43c\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.927289 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79q2x\" (UniqueName: \"kubernetes.io/projected/19fb5b55-7b88-47ff-a4e5-b8995a29db8f-kube-api-access-79q2x\") pod \"mariadb-operator-controller-manager-88c7-d2k8w\" (UID: \"19fb5b55-7b88-47ff-a4e5-b8995a29db8f\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.927397 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/25bcaa59-d154-41d1-8f73-92f41da4e3a9-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-qrxkz\" (UID: \"25bcaa59-d154-41d1-8f73-92f41da4e3a9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.927416 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vplf2\" (UniqueName: \"kubernetes.io/projected/25bcaa59-d154-41d1-8f73-92f41da4e3a9-kube-api-access-vplf2\") pod \"openstack-baremetal-operator-controller-manager-6d776955-qrxkz\" (UID: \"25bcaa59-d154-41d1-8f73-92f41da4e3a9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" Sep 30 20:28:39 crc kubenswrapper[4919]: E0930 20:28:39.928452 4919 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 20:28:39 crc kubenswrapper[4919]: E0930 20:28:39.928501 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25bcaa59-d154-41d1-8f73-92f41da4e3a9-cert podName:25bcaa59-d154-41d1-8f73-92f41da4e3a9 nodeName:}" failed. No retries permitted until 2025-09-30 20:28:40.428485244 +0000 UTC m=+905.544518371 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/25bcaa59-d154-41d1-8f73-92f41da4e3a9-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-qrxkz" (UID: "25bcaa59-d154-41d1-8f73-92f41da4e3a9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.939846 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.958363 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.959465 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vplf2\" (UniqueName: \"kubernetes.io/projected/25bcaa59-d154-41d1-8f73-92f41da4e3a9-kube-api-access-vplf2\") pod \"openstack-baremetal-operator-controller-manager-6d776955-qrxkz\" (UID: \"25bcaa59-d154-41d1-8f73-92f41da4e3a9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.960046 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.961054 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2zrl\" (UniqueName: \"kubernetes.io/projected/6ca3a550-cff6-49a7-ae12-43f75f743cb2-kube-api-access-j2zrl\") pod \"neutron-operator-controller-manager-64d7b59854-57rlr\" (UID: \"6ca3a550-cff6-49a7-ae12-43f75f743cb2\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.961932 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9dkg\" (UniqueName: \"kubernetes.io/projected/b1d4f4a6-d94c-4b73-8f95-9378547c5453-kube-api-access-g9dkg\") pod \"octavia-operator-controller-manager-76fcc6dc7c-56jpm\" (UID: \"b1d4f4a6-d94c-4b73-8f95-9378547c5453\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.962918 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79q2x\" (UniqueName: \"kubernetes.io/projected/19fb5b55-7b88-47ff-a4e5-b8995a29db8f-kube-api-access-79q2x\") pod \"mariadb-operator-controller-manager-88c7-d2k8w\" (UID: \"19fb5b55-7b88-47ff-a4e5-b8995a29db8f\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.964442 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmrbw\" (UniqueName: \"kubernetes.io/projected/bc2e190b-bcce-456a-938a-4a2cc054a43c-kube-api-access-dmrbw\") pod \"nova-operator-controller-manager-c7c776c96-5sgrl\" (UID: \"bc2e190b-bcce-456a-938a-4a2cc054a43c\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.968066 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m"] Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.970835 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-nplhd" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.972043 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fzl2\" (UniqueName: \"kubernetes.io/projected/e5b640a6-b206-4061-95f7-59c09848b709-kube-api-access-7fzl2\") pod \"ovn-operator-controller-manager-9976ff44c-zf7wn\" (UID: \"e5b640a6-b206-4061-95f7-59c09848b709\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.972276 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.981107 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-7fq62" Sep 30 20:28:39 crc kubenswrapper[4919]: I0930 20:28:39.981262 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m"] Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:39.998610 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.006480 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr"] Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.028991 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bmvm\" (UniqueName: \"kubernetes.io/projected/f6ccf519-3c56-404b-a649-17f0cda5f592-kube-api-access-6bmvm\") pod \"swift-operator-controller-manager-bc7dc7bd9-xlw57\" (UID: \"f6ccf519-3c56-404b-a649-17f0cda5f592\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.029081 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppd7c\" (UniqueName: \"kubernetes.io/projected/803e4642-1c89-4c17-8d49-43496c3fade8-kube-api-access-ppd7c\") pod \"telemetry-operator-controller-manager-fb4cc5b89-ktj4d\" (UID: \"803e4642-1c89-4c17-8d49-43496c3fade8\") " pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.029119 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxgwj\" (UniqueName: \"kubernetes.io/projected/fc527cf7-785e-41fb-9162-fb0c93fc20ff-kube-api-access-hxgwj\") pod \"placement-operator-controller-manager-589c58c6c-k95j5\" (UID: \"fc527cf7-785e-41fb-9162-fb0c93fc20ff\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.030675 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.126044 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.133519 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr2fb\" (UniqueName: \"kubernetes.io/projected/9ce83d6e-31dc-43d2-b413-055ee52b075d-kube-api-access-rr2fb\") pod \"watcher-operator-controller-manager-76669f99c-kdb2m\" (UID: \"9ce83d6e-31dc-43d2-b413-055ee52b075d\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.133719 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1053b07d-a2f6-4580-8edd-65e680622c9e-cert\") pod \"infra-operator-controller-manager-9d6c5db85-v96qz\" (UID: \"1053b07d-a2f6-4580-8edd-65e680622c9e\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.133800 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppd7c\" (UniqueName: \"kubernetes.io/projected/803e4642-1c89-4c17-8d49-43496c3fade8-kube-api-access-ppd7c\") pod \"telemetry-operator-controller-manager-fb4cc5b89-ktj4d\" (UID: \"803e4642-1c89-4c17-8d49-43496c3fade8\") " pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.133941 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxgwj\" (UniqueName: \"kubernetes.io/projected/fc527cf7-785e-41fb-9162-fb0c93fc20ff-kube-api-access-hxgwj\") pod \"placement-operator-controller-manager-589c58c6c-k95j5\" (UID: \"fc527cf7-785e-41fb-9162-fb0c93fc20ff\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.134077 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bmvm\" (UniqueName: \"kubernetes.io/projected/f6ccf519-3c56-404b-a649-17f0cda5f592-kube-api-access-6bmvm\") pod \"swift-operator-controller-manager-bc7dc7bd9-xlw57\" (UID: \"f6ccf519-3c56-404b-a649-17f0cda5f592\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.134160 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx5pr\" (UniqueName: \"kubernetes.io/projected/aa83041a-f63d-4879-8756-5a2929e81305-kube-api-access-xx5pr\") pod \"test-operator-controller-manager-f66b554c6-sqjrr\" (UID: \"aa83041a-f63d-4879-8756-5a2929e81305\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.139447 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.157527 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.158897 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1053b07d-a2f6-4580-8edd-65e680622c9e-cert\") pod \"infra-operator-controller-manager-9d6c5db85-v96qz\" (UID: \"1053b07d-a2f6-4580-8edd-65e680622c9e\") " pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.168835 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66"] Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.171402 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.172767 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-tvlmz" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.172963 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.179386 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66"] Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.183192 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.186265 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppd7c\" (UniqueName: \"kubernetes.io/projected/803e4642-1c89-4c17-8d49-43496c3fade8-kube-api-access-ppd7c\") pod \"telemetry-operator-controller-manager-fb4cc5b89-ktj4d\" (UID: \"803e4642-1c89-4c17-8d49-43496c3fade8\") " pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.186701 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxgwj\" (UniqueName: \"kubernetes.io/projected/fc527cf7-785e-41fb-9162-fb0c93fc20ff-kube-api-access-hxgwj\") pod \"placement-operator-controller-manager-589c58c6c-k95j5\" (UID: \"fc527cf7-785e-41fb-9162-fb0c93fc20ff\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.194997 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bmvm\" (UniqueName: \"kubernetes.io/projected/f6ccf519-3c56-404b-a649-17f0cda5f592-kube-api-access-6bmvm\") pod \"swift-operator-controller-manager-bc7dc7bd9-xlw57\" (UID: \"f6ccf519-3c56-404b-a649-17f0cda5f592\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.203406 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd"] Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.204498 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.208906 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.209263 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-6bs5s" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.209497 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd"] Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.235116 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr2fb\" (UniqueName: \"kubernetes.io/projected/9ce83d6e-31dc-43d2-b413-055ee52b075d-kube-api-access-rr2fb\") pod \"watcher-operator-controller-manager-76669f99c-kdb2m\" (UID: \"9ce83d6e-31dc-43d2-b413-055ee52b075d\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.235275 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx5pr\" (UniqueName: \"kubernetes.io/projected/aa83041a-f63d-4879-8756-5a2929e81305-kube-api-access-xx5pr\") pod \"test-operator-controller-manager-f66b554c6-sqjrr\" (UID: \"aa83041a-f63d-4879-8756-5a2929e81305\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.241074 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.249315 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr2fb\" (UniqueName: \"kubernetes.io/projected/9ce83d6e-31dc-43d2-b413-055ee52b075d-kube-api-access-rr2fb\") pod \"watcher-operator-controller-manager-76669f99c-kdb2m\" (UID: \"9ce83d6e-31dc-43d2-b413-055ee52b075d\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.261990 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.269384 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.271374 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx5pr\" (UniqueName: \"kubernetes.io/projected/aa83041a-f63d-4879-8756-5a2929e81305-kube-api-access-xx5pr\") pod \"test-operator-controller-manager-f66b554c6-sqjrr\" (UID: \"aa83041a-f63d-4879-8756-5a2929e81305\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.309901 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.336732 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.337014 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfxwt\" (UniqueName: \"kubernetes.io/projected/8d89dcea-1720-4d39-8ea1-016d4c2ad572-kube-api-access-bfxwt\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd\" (UID: \"8d89dcea-1720-4d39-8ea1-016d4c2ad572\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.337076 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdb2k\" (UniqueName: \"kubernetes.io/projected/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-kube-api-access-pdb2k\") pod \"openstack-operator-controller-manager-6688bc8b84-h7q66\" (UID: \"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6\") " pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.337129 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-cert\") pod \"openstack-operator-controller-manager-6688bc8b84-h7q66\" (UID: \"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6\") " pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.438716 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfxwt\" (UniqueName: \"kubernetes.io/projected/8d89dcea-1720-4d39-8ea1-016d4c2ad572-kube-api-access-bfxwt\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd\" (UID: \"8d89dcea-1720-4d39-8ea1-016d4c2ad572\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.439111 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/25bcaa59-d154-41d1-8f73-92f41da4e3a9-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-qrxkz\" (UID: \"25bcaa59-d154-41d1-8f73-92f41da4e3a9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.439369 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdb2k\" (UniqueName: \"kubernetes.io/projected/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-kube-api-access-pdb2k\") pod \"openstack-operator-controller-manager-6688bc8b84-h7q66\" (UID: \"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6\") " pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.439555 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-cert\") pod \"openstack-operator-controller-manager-6688bc8b84-h7q66\" (UID: \"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6\") " pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" Sep 30 20:28:40 crc kubenswrapper[4919]: E0930 20:28:40.439885 4919 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 30 20:28:40 crc kubenswrapper[4919]: E0930 20:28:40.439949 4919 
Sep 30 20:28:40 crc kubenswrapper[4919]: E0930 20:28:40.439949 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-cert podName:0f662e98-a8ef-4ae4-8d9b-2853a779ecf6 nodeName:}" failed. No retries permitted until 2025-09-30 20:28:40.939927586 +0000 UTC m=+906.055960713 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-cert") pod "openstack-operator-controller-manager-6688bc8b84-h7q66" (UID: "0f662e98-a8ef-4ae4-8d9b-2853a779ecf6") : secret "webhook-server-cert" not found
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.445120 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/25bcaa59-d154-41d1-8f73-92f41da4e3a9-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-qrxkz\" (UID: \"25bcaa59-d154-41d1-8f73-92f41da4e3a9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz"
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.462005 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdb2k\" (UniqueName: \"kubernetes.io/projected/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-kube-api-access-pdb2k\") pod \"openstack-operator-controller-manager-6688bc8b84-h7q66\" (UID: \"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6\") " pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66"
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.464320 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfxwt\" (UniqueName: \"kubernetes.io/projected/8d89dcea-1720-4d39-8ea1-016d4c2ad572-kube-api-access-bfxwt\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd\" (UID: \"8d89dcea-1720-4d39-8ea1-016d4c2ad572\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd"
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.466308 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz"
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.497487 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m"
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.566588 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd"
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.895772 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk"]
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.896240 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh"]
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.925517 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8"]
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.949536 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.951984 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-cert\") pod \"openstack-operator-controller-manager-6688bc8b84-h7q66\" (UID: \"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6\") " pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66"
Sep 30 20:28:40 crc kubenswrapper[4919]: I0930 20:28:40.960391 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0f662e98-a8ef-4ae4-8d9b-2853a779ecf6-cert\") pod \"openstack-operator-controller-manager-6688bc8b84-h7q66\" (UID: \"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6\") " pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66"
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.301473 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-k6295"] Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.304164 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1fbeecba_9bf8_44ef_819b_63bcf26ce691.slice/crio-047cbec45ee981e232f2b329e5a834640e85a6f76aecfa19b11de8f45bd3452a WatchSource:0}: Error finding container 047cbec45ee981e232f2b329e5a834640e85a6f76aecfa19b11de8f45bd3452a: Status 404 returned error can't find the container with id 047cbec45ee981e232f2b329e5a834640e85a6f76aecfa19b11de8f45bd3452a Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.325995 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod94b17ff0_8f16_4683_8153_a0d8b2b55437.slice/crio-c3d784fcbcc5f739d133e5da6661dfe2331c567b68944cbc257f614d51b188df WatchSource:0}: Error finding container c3d784fcbcc5f739d133e5da6661dfe2331c567b68944cbc257f614d51b188df: Status 404 returned error can't find the container with id c3d784fcbcc5f739d133e5da6661dfe2331c567b68944cbc257f614d51b188df Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.328096 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl"] Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.330641 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod570ac8cc_5b75_4404_9df9_36387db5e5aa.slice/crio-e077c55af060275e9ec9e25430f1608da47417253f375b58281c6fc2a6d05df0 WatchSource:0}: Error finding container e077c55af060275e9ec9e25430f1608da47417253f375b58281c6fc2a6d05df0: Status 404 returned error can't find the container with id e077c55af060275e9ec9e25430f1608da47417253f375b58281c6fc2a6d05df0 Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.331773 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod469c99b8_4171_48c7_9091_fbab0c200c11.slice/crio-a4ff8e8800cc71b2f2039e64ddb96974352cd83265a452b80d5afaa04f7a8d96 WatchSource:0}: Error finding container a4ff8e8800cc71b2f2039e64ddb96974352cd83265a452b80d5afaa04f7a8d96: Status 404 returned error can't find the container with id a4ff8e8800cc71b2f2039e64ddb96974352cd83265a452b80d5afaa04f7a8d96 Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.333675 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d"] Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.333970 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fc088d5_3fb5_40a2_b086_c1a4e52a325e.slice/crio-d1e1548a3c45ce1ffa316cc9c11efc6d8dd7018c9edd8510757ac678a7faa1ca WatchSource:0}: Error finding container d1e1548a3c45ce1ffa316cc9c11efc6d8dd7018c9edd8510757ac678a7faa1ca: Status 404 returned error can't find the container with id d1e1548a3c45ce1ffa316cc9c11efc6d8dd7018c9edd8510757ac678a7faa1ca Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.334926 4919 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc2e190b_bcce_456a_938a_4a2cc054a43c.slice/crio-001d411653e6bf8e1ccad21a89f81790de679d67b488e1bafecd29ff8a28aa27 WatchSource:0}: Error finding container 001d411653e6bf8e1ccad21a89f81790de679d67b488e1bafecd29ff8a28aa27: Status 404 returned error can't find the container with id 001d411653e6bf8e1ccad21a89f81790de679d67b488e1bafecd29ff8a28aa27 Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.357159 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx"] Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.366570 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w"] Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.373544 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr"] Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.373904 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19fb5b55_7b88_47ff_a4e5_b8995a29db8f.slice/crio-98bb638abe60c20d5f24f2043e8ba296d941790275d3c11fc067d59c01e370dc WatchSource:0}: Error finding container 98bb638abe60c20d5f24f2043e8ba296d941790275d3c11fc067d59c01e370dc: Status 404 returned error can't find the container with id 98bb638abe60c20d5f24f2043e8ba296d941790275d3c11fc067d59c01e370dc Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.379557 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp"] Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.411473 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb"] Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.418597 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm"] Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.419403 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b7b2889_ed1b_45b0_909c_011b3fbee825.slice/crio-b493ed2d157f9c3b5e4b5b974df036ecc735d3d7a5a10a40ca496c2d0bbc546b WatchSource:0}: Error finding container b493ed2d157f9c3b5e4b5b974df036ecc735d3d7a5a10a40ca496c2d0bbc546b: Status 404 returned error can't find the container with id b493ed2d157f9c3b5e4b5b974df036ecc735d3d7a5a10a40ca496c2d0bbc546b Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.422142 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1d4f4a6_d94c_4b73_8f95_9378547c5453.slice/crio-e91c7ccacdb78ae40294f4a97194e398af7cb5f3ac7befdeded4e862f4c8ca1a WatchSource:0}: Error finding container e91c7ccacdb78ae40294f4a97194e398af7cb5f3ac7befdeded4e862f4c8ca1a: Status 404 returned error can't find the container with id e91c7ccacdb78ae40294f4a97194e398af7cb5f3ac7befdeded4e862f4c8ca1a Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.541663 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" event={"ID":"35922b82-d9a9-425b-89e2-919fd9d937dd","Type":"ContainerStarted","Data":"cb16af2347794cd019aac677c5cc3c0dbbd14d1877ea5788bc4ae06d01baba51"} 
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.546043 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" event={"ID":"570ac8cc-5b75-4404-9df9-36387db5e5aa","Type":"ContainerStarted","Data":"e077c55af060275e9ec9e25430f1608da47417253f375b58281c6fc2a6d05df0"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.548166 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" event={"ID":"7fc088d5-3fb5-40a2-b086-c1a4e52a325e","Type":"ContainerStarted","Data":"d1e1548a3c45ce1ffa316cc9c11efc6d8dd7018c9edd8510757ac678a7faa1ca"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.549317 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" event={"ID":"8b7b2889-ed1b-45b0-909c-011b3fbee825","Type":"ContainerStarted","Data":"b493ed2d157f9c3b5e4b5b974df036ecc735d3d7a5a10a40ca496c2d0bbc546b"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.550681 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" event={"ID":"fd8b2eba-9c90-4a16-b470-6e43eaa38f4d","Type":"ContainerStarted","Data":"73638e8fc23e9a89304043d55e22740479b8d63c04a143bd4e77ce8b3111cebd"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.554300 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" event={"ID":"469c99b8-4171-48c7-9091-fbab0c200c11","Type":"ContainerStarted","Data":"a4ff8e8800cc71b2f2039e64ddb96974352cd83265a452b80d5afaa04f7a8d96"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.556462 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" event={"ID":"94b17ff0-8f16-4683-8153-a0d8b2b55437","Type":"ContainerStarted","Data":"c3d784fcbcc5f739d133e5da6661dfe2331c567b68944cbc257f614d51b188df"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.557568 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" event={"ID":"b1d4f4a6-d94c-4b73-8f95-9378547c5453","Type":"ContainerStarted","Data":"e91c7ccacdb78ae40294f4a97194e398af7cb5f3ac7befdeded4e862f4c8ca1a"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.558634 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" event={"ID":"75189fe6-5b26-4743-b2e2-8e0fee41c653","Type":"ContainerStarted","Data":"c4ed5ed40e41fff4b4cf428a1b9817b9df6c6c5c04fae9ef3ae4b3be8c0c2df9"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.559377 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" event={"ID":"19fb5b55-7b88-47ff-a4e5-b8995a29db8f","Type":"ContainerStarted","Data":"98bb638abe60c20d5f24f2043e8ba296d941790275d3c11fc067d59c01e370dc"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.560112 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" event={"ID":"bc2e190b-bcce-456a-938a-4a2cc054a43c","Type":"ContainerStarted","Data":"001d411653e6bf8e1ccad21a89f81790de679d67b488e1bafecd29ff8a28aa27"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.561529 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" event={"ID":"1fbeecba-9bf8-44ef-819b-63bcf26ce691","Type":"ContainerStarted","Data":"047cbec45ee981e232f2b329e5a834640e85a6f76aecfa19b11de8f45bd3452a"}
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.605045 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr"]
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.615360 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz"]
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.620628 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr"]
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.652186 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn"]
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.656277 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66"]
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.669340 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m"]
Sep 30 20:28:41 crc kubenswrapper[4919]: W0930 20:28:41.680753 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1053b07d_a2f6_4580_8edd_65e680622c9e.slice/crio-7470f1163eed1c30780971d16f230d9ac71d233cec2fd10f9ea08da7dc0f7b04 WatchSource:0}: Error finding container 7470f1163eed1c30780971d16f230d9ac71d233cec2fd10f9ea08da7dc0f7b04: Status 404 returned error can't find the container with id 7470f1163eed1c30780971d16f230d9ac71d233cec2fd10f9ea08da7dc0f7b04
Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.703033 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d"]
Sep 30 20:28:41 crc kubenswrapper[4919]: E0930 20:28:41.725589 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7fzl2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-9976ff44c-zf7wn_openstack-operators(e5b640a6-b206-4061-95f7-59c09848b709): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 20:28:41 crc kubenswrapper[4919]: E0930 20:28:41.727173 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xx5pr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-sqjrr_openstack-operators(aa83041a-f63d-4879-8756-5a2929e81305): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 20:28:41 crc kubenswrapper[4919]: E0930 20:28:41.728779 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rr2fb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-76669f99c-kdb2m_openstack-operators(9ce83d6e-31dc-43d2-b413-055ee52b075d): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Sep 30 20:28:41 crc kubenswrapper[4919]: E0930 20:28:41.734840 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4r5xl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-9d6c5db85-v96qz_openstack-operators(1053b07d-a2f6-4580-8edd-65e680622c9e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ppd7c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-fb4cc5b89-ktj4d_openstack-operators(803e4642-1c89-4c17-8d49-43496c3fade8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 20:28:41 crc kubenswrapper[4919]: E0930 20:28:41.745287 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bfxwt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd_openstack-operators(8d89dcea-1720-4d39-8ea1-016d4c2ad572): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 20:28:41 crc kubenswrapper[4919]: E0930 20:28:41.746437 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" podUID="8d89dcea-1720-4d39-8ea1-016d4c2ad572" Sep 30 20:28:41 crc kubenswrapper[4919]: E0930 20:28:41.760431 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 
--leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Val
ue:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.
io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/o
penstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_LIGHTSPEED_IMAGE_URL_DEFAULT,Value:quay.io/openstack-lightspeed/rag-content:os-docs-2024.2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m 
DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vplf2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-6d776955-qrxkz_openstack-operators(25bcaa59-d154-41d1-8f73-92f41da4e3a9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 20:28:41 crc kubenswrapper[4919]: E0930 20:28:41.766916 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hxgwj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-589c58c6c-k95j5_openstack-operators(fc527cf7-785e-41fb-9162-fb0c93fc20ff): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.790647 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd"] Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.796599 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57"] Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.801445 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz"] Sep 30 20:28:41 crc kubenswrapper[4919]: I0930 20:28:41.804224 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5"] Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.601575 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" podUID="9ce83d6e-31dc-43d2-b413-055ee52b075d" Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.601931 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" event={"ID":"e5b640a6-b206-4061-95f7-59c09848b709","Type":"ContainerStarted","Data":"e835489074ed69f9992abca684c8e306ac70aa8890ee59c89e3e3756d8158ac2"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.601968 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" event={"ID":"e5b640a6-b206-4061-95f7-59c09848b709","Type":"ContainerStarted","Data":"2361d6d9c406f818b3eb97364e369da937b8313e0855f85922ac9338b08853ee"} Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.607907 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" podUID="1053b07d-a2f6-4580-8edd-65e680622c9e" Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.608151 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" event={"ID":"8d89dcea-1720-4d39-8ea1-016d4c2ad572","Type":"ContainerStarted","Data":"cc16d959e10d2eee0be9265851cf23b6db072a3eb7abe51e6a69833a12a90c6d"} Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.609645 4919 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" podUID="e5b640a6-b206-4061-95f7-59c09848b709" Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.610580 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" podUID="fc527cf7-785e-41fb-9162-fb0c93fc20ff" Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.613623 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" podUID="25bcaa59-d154-41d1-8f73-92f41da4e3a9" Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.615205 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" event={"ID":"fc527cf7-785e-41fb-9162-fb0c93fc20ff","Type":"ContainerStarted","Data":"a9239117a1d696342ef9ec99c4b9127b5dcd433d04ddbb95989a5cdbc3f081ca"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.615284 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" event={"ID":"fc527cf7-785e-41fb-9162-fb0c93fc20ff","Type":"ContainerStarted","Data":"e8a2dd625a77f47f940ef597746ee658a827926689badebd0f982648c03da3ad"} Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.625661 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" podUID="8d89dcea-1720-4d39-8ea1-016d4c2ad572" Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.629790 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" podUID="803e4642-1c89-4c17-8d49-43496c3fade8" Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.631062 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" event={"ID":"25bcaa59-d154-41d1-8f73-92f41da4e3a9","Type":"ContainerStarted","Data":"49160b0736eb374237d33cc9fd2266922a0aa7d204b67798a394f9f960b98153"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.631096 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" event={"ID":"25bcaa59-d154-41d1-8f73-92f41da4e3a9","Type":"ContainerStarted","Data":"3194919817c2f3fe7bc10c7422fe9b89d83304046dff8764afe2d5e1dcd1c2ef"} Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.632655 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" podUID="25bcaa59-d154-41d1-8f73-92f41da4e3a9" Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.651429 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" event={"ID":"f6ccf519-3c56-404b-a649-17f0cda5f592","Type":"ContainerStarted","Data":"094b793856093c6bbe34f67841e937a0224a40b860f204d6f24154fa9186edab"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.666224 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" event={"ID":"9ce83d6e-31dc-43d2-b413-055ee52b075d","Type":"ContainerStarted","Data":"9e436842218f114b6916ec7f1b0f23fbe481ebfb2c00e4b8368cc58982f49af9"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.666266 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" event={"ID":"9ce83d6e-31dc-43d2-b413-055ee52b075d","Type":"ContainerStarted","Data":"cff953b78023105d55c7a68bb0115ff44e929ed2f6e1d2bd141c78e8fa5fbaad"} Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.670576 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" podUID="9ce83d6e-31dc-43d2-b413-055ee52b075d" Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.678233 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" event={"ID":"aa83041a-f63d-4879-8756-5a2929e81305","Type":"ContainerStarted","Data":"f0fff6f3981fe97cb7196013e519528fdae28ff2264c140c76e0c2f5869d142d"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.701764 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" event={"ID":"6ca3a550-cff6-49a7-ae12-43f75f743cb2","Type":"ContainerStarted","Data":"acfeebc88bbb21cca1a43f39a9e044f772b367629bcda72d1e24be2089fe59a0"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.709591 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" event={"ID":"1053b07d-a2f6-4580-8edd-65e680622c9e","Type":"ContainerStarted","Data":"0d25c74f5ffa15967bab76d3eec457e370beeb1f7a5a1c35c1dde2a26a1eaa6c"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.709631 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" event={"ID":"1053b07d-a2f6-4580-8edd-65e680622c9e","Type":"ContainerStarted","Data":"7470f1163eed1c30780971d16f230d9ac71d233cec2fd10f9ea08da7dc0f7b04"} Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.722664 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3\\\"\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" podUID="1053b07d-a2f6-4580-8edd-65e680622c9e" Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 
20:28:42.741541 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" event={"ID":"803e4642-1c89-4c17-8d49-43496c3fade8","Type":"ContainerStarted","Data":"bff74b85aa6b9291ef07e14aef7b5523de34b476653ec2492abd42f7f28cb870"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.741594 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" event={"ID":"803e4642-1c89-4c17-8d49-43496c3fade8","Type":"ContainerStarted","Data":"f1ef8d033fce91df334e327e282a738c352f21a3c5ce30e003706637a1a97242"} Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.756725 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.38:5001/openstack-k8s-operators/telemetry-operator:e7cd56a6e6d4afac7bb79260e748db3eb166f53c\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" podUID="803e4642-1c89-4c17-8d49-43496c3fade8" Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.774617 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" event={"ID":"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6","Type":"ContainerStarted","Data":"6515da309a34c1f33f6369f3d00450039752b9076b2fce7f617056a91ccbe053"} Sep 30 20:28:42 crc kubenswrapper[4919]: I0930 20:28:42.774661 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" event={"ID":"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6","Type":"ContainerStarted","Data":"ef1f6e6dec3fa252ba536ef504fca83145e1011d3c6da6ad3c9fa466115c25f5"} Sep 30 20:28:42 crc kubenswrapper[4919]: E0930 20:28:42.842933 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" podUID="aa83041a-f63d-4879-8756-5a2929e81305" Sep 30 20:28:43 crc kubenswrapper[4919]: I0930 20:28:43.805800 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" event={"ID":"aa83041a-f63d-4879-8756-5a2929e81305","Type":"ContainerStarted","Data":"8bd3433935e90e7e55126c985d149dfeb28af07cbedd0e488cfc160a2d82528b"} Sep 30 20:28:43 crc kubenswrapper[4919]: E0930 20:28:43.807205 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" podUID="aa83041a-f63d-4879-8756-5a2929e81305" Sep 30 20:28:43 crc kubenswrapper[4919]: I0930 20:28:43.829936 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" event={"ID":"0f662e98-a8ef-4ae4-8d9b-2853a779ecf6","Type":"ContainerStarted","Data":"05c254787d25e3a607eb4e51b801cfa09b31cd1c71f1416ed12b7ff8533a2cac"} Sep 30 20:28:43 crc kubenswrapper[4919]: E0930 20:28:43.832975 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" podUID="e5b640a6-b206-4061-95f7-59c09848b709" Sep 30 20:28:43 crc kubenswrapper[4919]: E0930 20:28:43.832983 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" podUID="8d89dcea-1720-4d39-8ea1-016d4c2ad572" Sep 30 20:28:43 crc kubenswrapper[4919]: E0930 20:28:43.833030 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:e3f947e9034a951620a76eaf41ceec95eefcef0eacb251b10993d6820d5e1af6\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" podUID="25bcaa59-d154-41d1-8f73-92f41da4e3a9" Sep 30 20:28:43 crc kubenswrapper[4919]: E0930 20:28:43.833460 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:3f96f0843934236c261db73dacb50fc12a288890562ee4ebdc9ec22360937cd3\\\"\"" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" podUID="1053b07d-a2f6-4580-8edd-65e680622c9e" Sep 30 20:28:43 crc kubenswrapper[4919]: E0930 20:28:43.833505 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.38:5001/openstack-k8s-operators/telemetry-operator:e7cd56a6e6d4afac7bb79260e748db3eb166f53c\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" podUID="803e4642-1c89-4c17-8d49-43496c3fade8" Sep 30 20:28:43 crc kubenswrapper[4919]: E0930 20:28:43.833513 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" podUID="fc527cf7-785e-41fb-9162-fb0c93fc20ff" Sep 30 20:28:43 crc kubenswrapper[4919]: E0930 20:28:43.834789 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" podUID="9ce83d6e-31dc-43d2-b413-055ee52b075d" Sep 30 20:28:43 crc kubenswrapper[4919]: I0930 20:28:43.981915 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" podStartSLOduration=3.981898123 podStartE2EDuration="3.981898123s" podCreationTimestamp="2025-09-30 20:28:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-30 20:28:43.978651179 +0000 UTC m=+909.094684306" watchObservedRunningTime="2025-09-30 20:28:43.981898123 +0000 UTC m=+909.097931240" Sep 30 20:28:44 crc kubenswrapper[4919]: I0930 20:28:44.840923 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" Sep 30 20:28:44 crc kubenswrapper[4919]: E0930 20:28:44.843410 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" podUID="fc527cf7-785e-41fb-9162-fb0c93fc20ff" Sep 30 20:28:44 crc kubenswrapper[4919]: E0930 20:28:44.843757 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" podUID="aa83041a-f63d-4879-8756-5a2929e81305" Sep 30 20:28:51 crc kubenswrapper[4919]: I0930 20:28:51.152330 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6688bc8b84-h7q66" Sep 30 20:28:56 crc kubenswrapper[4919]: I0930 20:28:56.061895 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:28:56 crc kubenswrapper[4919]: I0930 20:28:56.062739 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:28:57 crc kubenswrapper[4919]: E0930 20:28:57.231516 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8" Sep 30 20:28:57 crc kubenswrapper[4919]: E0930 20:28:57.231706 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g9dkg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-76fcc6dc7c-56jpm_openstack-operators(b1d4f4a6-d94c-4b73-8f95-9378547c5453): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:28:58 crc kubenswrapper[4919]: E0930 20:28:58.128496 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:21792a2317c0a55e40b2a02a7d5d4682b76538ed2a2e0633199aa395e60ecc72" Sep 30 20:28:58 crc kubenswrapper[4919]: E0930 20:28:58.128930 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:21792a2317c0a55e40b2a02a7d5d4682b76538ed2a2e0633199aa395e60ecc72,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tc9wn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-84958c4d49-k6295_openstack-operators(1fbeecba-9bf8-44ef-819b-63bcf26ce691): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:28:58 crc kubenswrapper[4919]: E0930 20:28:58.539772 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a" Sep 30 20:28:58 crc kubenswrapper[4919]: E0930 20:28:58.539954 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m94gq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-5bd55b4bff-p9rsr_openstack-operators(469c99b8-4171-48c7-9091-fbab0c200c11): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:28:58 crc kubenswrapper[4919]: E0930 20:28:58.993603 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef" Sep 30 20:28:58 crc kubenswrapper[4919]: E0930 20:28:58.993820 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dmrbw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-c7c776c96-5sgrl_openstack-operators(bc2e190b-bcce-456a-938a-4a2cc054a43c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:28:59 crc kubenswrapper[4919]: E0930 20:28:59.425322 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884"
Sep 30 20:28:59 crc kubenswrapper[4919]: E0930 20:28:59.425509 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ftkdr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6d68dbc695-2mrbb_openstack-operators(8b7b2889-ed1b-45b0-909c-011b3fbee825): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:28:59 crc kubenswrapper[4919]: E0930 20:28:59.814153 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2"
Sep 30 20:28:59 crc kubenswrapper[4919]: E0930 20:28:59.814375 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6l8zf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-84f4f7b77b-mjllk_openstack-operators(75189fe6-5b26-4743-b2e2-8e0fee41c653): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:29:00 crc kubenswrapper[4919]: E0930 20:29:00.238069 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:15d7b5a365350a831ca59d984df67fadeccf89d599e487a7597b105afb82ce4a"
Sep 30 20:29:00 crc kubenswrapper[4919]: E0930 20:29:00.238348 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:15d7b5a365350a831ca59d984df67fadeccf89d599e487a7597b105afb82ce4a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-79q2x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-88c7-d2k8w_openstack-operators(19fb5b55-7b88-47ff-a4e5-b8995a29db8f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:29:00 crc kubenswrapper[4919]: E0930 20:29:00.621871 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:bb39758cc8cd0d2cd02841dc81b53fd88647e2db15ee16cdd8c44d4098a942fd"
Sep 30 20:29:00 crc kubenswrapper[4919]: E0930 20:29:00.622050 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:bb39758cc8cd0d2cd02841dc81b53fd88647e2db15ee16cdd8c44d4098a942fd,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5zjcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-6ff8b75857-nfpc8_openstack-operators(fd8b2eba-9c90-4a16-b470-6e43eaa38f4d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 30 20:29:05 crc kubenswrapper[4919]: E0930 20:29:05.098554 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" podUID="19fb5b55-7b88-47ff-a4e5-b8995a29db8f"
Sep 30 20:29:05 crc kubenswrapper[4919]: E0930 20:29:05.160183 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" podUID="fd8b2eba-9c90-4a16-b470-6e43eaa38f4d"
Sep 30 20:29:05 crc kubenswrapper[4919]: E0930 20:29:05.209631 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" podUID="bc2e190b-bcce-456a-938a-4a2cc054a43c"
Sep 30 20:29:05 crc kubenswrapper[4919]: E0930 20:29:05.267545 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" podUID="75189fe6-5b26-4743-b2e2-8e0fee41c653"
Sep 30 20:29:05 crc kubenswrapper[4919]: E0930 20:29:05.285817 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" podUID="8b7b2889-ed1b-45b0-909c-011b3fbee825"
Sep 30 20:29:05 crc kubenswrapper[4919]: E0930 20:29:05.417800 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" podUID="b1d4f4a6-d94c-4b73-8f95-9378547c5453"
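Every pull failure above is the same gRPC cancellation surfacing through three layers (log.go, kuberuntime_manager.go, pod_workers.go): CRI-O aborted the image copy, so the kubelet records ErrImagePull and the pod worker re-queues the sync. Below is a minimal client-go sketch of how one could enumerate pods stuck in these states; the namespace and the kubeconfig path are illustrative assumptions, not values taken from this log.

// findImagePullFailures.go: list containers waiting on image-pull errors.
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumes a kubeconfig at the default location (~/.kube/config).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)
	pods, err := client.CoreV1().Pods("openstack-operators").List(context.TODO(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for _, pod := range pods.Items {
		for _, cs := range pod.Status.ContainerStatuses {
			// A container stuck on a pull sits in the Waiting state with
			// reason ErrImagePull (active failure) or ImagePullBackOff.
			if w := cs.State.Waiting; w != nil &&
				(w.Reason == "ErrImagePull" || w.Reason == "ImagePullBackOff") {
				fmt.Printf("%s/%s: %s\n", pod.Name, cs.Name, w.Message)
			}
		}
	}
}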
Sep 30 20:29:05 crc kubenswrapper[4919]: E0930 20:29:05.475720 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" podUID="469c99b8-4171-48c7-9091-fbab0c200c11"
Sep 30 20:29:05 crc kubenswrapper[4919]: E0930 20:29:05.686651 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" podUID="1fbeecba-9bf8-44ef-819b-63bcf26ce691"
Sep 30 20:29:05 crc kubenswrapper[4919]: I0930 20:29:05.985711 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" event={"ID":"803e4642-1c89-4c17-8d49-43496c3fade8","Type":"ContainerStarted","Data":"aa34ac140597c763a3d5193b33a60a718cb4a53a06bc1284024b3cc8d6dbe6bb"}
Sep 30 20:29:05 crc kubenswrapper[4919]: I0930 20:29:05.986167 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.003672 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" event={"ID":"35922b82-d9a9-425b-89e2-919fd9d937dd","Type":"ContainerStarted","Data":"0d50d91d48e9ba3da163f9cce62f158b88810f67a4eb45dbfd084a4c6c667b56"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.003718 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" event={"ID":"35922b82-d9a9-425b-89e2-919fd9d937dd","Type":"ContainerStarted","Data":"2632f9ff0d8c3b44668197f0c7ac2ae146f2331ce1274237b1345607cb5e0b04"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.003773 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.006238 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" event={"ID":"aa83041a-f63d-4879-8756-5a2929e81305","Type":"ContainerStarted","Data":"5b6e57cd61abd006ec721a2da3a1fa9059f1db39e9f1b4bae6e4a881f6f85213"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.006448 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.009630 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" event={"ID":"8d89dcea-1720-4d39-8ea1-016d4c2ad572","Type":"ContainerStarted","Data":"306f62287de702e0bd26a89a2e93a5dca41af1a8bab4a61df0f7448322679b42"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.012721 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" event={"ID":"6ca3a550-cff6-49a7-ae12-43f75f743cb2","Type":"ContainerStarted","Data":"60f1f39b928684fd2455d2fb4844e17ea8c1c005d87d1e577eae64669bc84b0e"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.012767 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" event={"ID":"6ca3a550-cff6-49a7-ae12-43f75f743cb2","Type":"ContainerStarted","Data":"72bc23bda738ba33448595a7626c5420cdffdb45e186d2f4918de79ec15cdb8a"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.012891 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.015403 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" event={"ID":"1fbeecba-9bf8-44ef-819b-63bcf26ce691","Type":"ContainerStarted","Data":"946f6e8270fbeae18ffeaf5f5404ca12676e64d2db5642998afe14e00de29b1f"}
Sep 30 20:29:06 crc kubenswrapper[4919]: E0930 20:29:06.017999 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:21792a2317c0a55e40b2a02a7d5d4682b76538ed2a2e0633199aa395e60ecc72\\\"\"" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" podUID="1fbeecba-9bf8-44ef-819b-63bcf26ce691"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.019740 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" event={"ID":"94b17ff0-8f16-4683-8153-a0d8b2b55437","Type":"ContainerStarted","Data":"56ecf23b09d9953b28a00c73add6f97be00514b88ce39465144f4a897f7056dd"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.019772 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" event={"ID":"94b17ff0-8f16-4683-8153-a0d8b2b55437","Type":"ContainerStarted","Data":"18ac82eaf126a93ef9543f554517cb5c378e7b5e743d0c74c0c0ad2e8cd3944f"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.019846 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.021607 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" event={"ID":"e5b640a6-b206-4061-95f7-59c09848b709","Type":"ContainerStarted","Data":"4b53f4d9f856665ac63314c421c2935e14ab8633a4fdc7e1451ffdf56d78ae0e"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.021785 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.026013 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" podStartSLOduration=3.960834252 podStartE2EDuration="27.025999305s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.744967739 +0000 UTC m=+906.861000866" lastFinishedPulling="2025-09-30 20:29:04.810132792 +0000 UTC m=+929.926165919" observedRunningTime="2025-09-30 20:29:06.021492375 +0000 UTC m=+931.137525502" watchObservedRunningTime="2025-09-30 20:29:06.025999305 +0000 UTC m=+931.142032432"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.026555 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" event={"ID":"570ac8cc-5b75-4404-9df9-36387db5e5aa","Type":"ContainerStarted","Data":"09cc8fabc3a17e9849ce5a7442b379d78ca31f352bf615c5262bd8e39f3ad217"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.026600 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" event={"ID":"570ac8cc-5b75-4404-9df9-36387db5e5aa","Type":"ContainerStarted","Data":"7829683ad22cc692968c9759f078b079fc0b6b946fa83bcb7e6cb4b900c67407"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.026738 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.028840 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" event={"ID":"7fc088d5-3fb5-40a2-b086-c1a4e52a325e","Type":"ContainerStarted","Data":"6a524f9872f43b0dd41c3a54b3ea38ec3325b2078bbe9c3e4b5ff9b339a98e64"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.028864 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" event={"ID":"7fc088d5-3fb5-40a2-b086-c1a4e52a325e","Type":"ContainerStarted","Data":"19af335bb524cdb6774277644c66de13d173d9b68bdf7d0c72bb8d7acfa5060a"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.029470 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.032037 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" event={"ID":"b1d4f4a6-d94c-4b73-8f95-9378547c5453","Type":"ContainerStarted","Data":"6416a8057bdaa6dfe2f98f8be2c0e8c25336ba69d258a3da3f48e5a6a4196655"}
Sep 30 20:29:06 crc kubenswrapper[4919]: E0930 20:29:06.032998 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" podUID="b1d4f4a6-d94c-4b73-8f95-9378547c5453"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.033864 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" event={"ID":"25bcaa59-d154-41d1-8f73-92f41da4e3a9","Type":"ContainerStarted","Data":"63fc9d284eba59612ae8d185f1ea5357b8bd4865882a38b03619a6902c80fd23"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.034067 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.038247 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" event={"ID":"9ce83d6e-31dc-43d2-b413-055ee52b075d","Type":"ContainerStarted","Data":"81b55b7e749b31be1fa3161cc06d08685b128aed3b9d1aa1afb8b17a32a1d495"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.038425 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.040085 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" event={"ID":"f6ccf519-3c56-404b-a649-17f0cda5f592","Type":"ContainerStarted","Data":"12bc78616306c151d20cf299d925fd6467c3bc0a7ba415c94abfd97c075eba4b"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.040270 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.042207 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" event={"ID":"fd8b2eba-9c90-4a16-b470-6e43eaa38f4d","Type":"ContainerStarted","Data":"0d032311cb6cfdf3b731a62f32b01111cf9e0e3327ad31d86ad3563e42f69390"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.042770 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" podStartSLOduration=5.721277995 podStartE2EDuration="27.0427604s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:40.952626433 +0000 UTC m=+906.068659550" lastFinishedPulling="2025-09-30 20:29:02.274108828 +0000 UTC m=+927.390141955" observedRunningTime="2025-09-30 20:29:06.04102424 +0000 UTC m=+931.157057367" watchObservedRunningTime="2025-09-30 20:29:06.0427604 +0000 UTC m=+931.158793527"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.044794 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" event={"ID":"bc2e190b-bcce-456a-938a-4a2cc054a43c","Type":"ContainerStarted","Data":"381728d14cb31e2764f7c6c60324ff66291272b8dd1762d74180400211593b9b"}
Sep 30 20:29:06 crc kubenswrapper[4919]: E0930 20:29:06.046394 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" podUID="bc2e190b-bcce-456a-938a-4a2cc054a43c"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.046504 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" event={"ID":"469c99b8-4171-48c7-9091-fbab0c200c11","Type":"ContainerStarted","Data":"8af63fad6e2d57098ca9f0982abd7f0da54b8065f51262ada4dcdb15321b98bc"}
Sep 30 20:29:06 crc kubenswrapper[4919]: E0930 20:29:06.047497 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" podUID="469c99b8-4171-48c7-9091-fbab0c200c11"
Sep 30 20:29:06 crc kubenswrapper[4919]: E0930 20:29:06.048022 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:bb39758cc8cd0d2cd02841dc81b53fd88647e2db15ee16cdd8c44d4098a942fd\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" podUID="fd8b2eba-9c90-4a16-b470-6e43eaa38f4d"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.051395 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" event={"ID":"1053b07d-a2f6-4580-8edd-65e680622c9e","Type":"ContainerStarted","Data":"7a53b9a6d4b322b3c5b792ad906e36f316ab4443b054a954a146a5c02da53d93"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.051795 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.056188 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" event={"ID":"fc527cf7-785e-41fb-9162-fb0c93fc20ff","Type":"ContainerStarted","Data":"81b3d2d1feb7344d4ed9e851fbaa4288962fa6fab43ad39653485c02d70ef5e9"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.056865 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.061522 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" event={"ID":"75189fe6-5b26-4743-b2e2-8e0fee41c653","Type":"ContainerStarted","Data":"4780d7144b38e93fd42d8c9026b6e8455771164abb421df94c9bb0b411c61e12"}
Sep 30 20:29:06 crc kubenswrapper[4919]: E0930 20:29:06.065669 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2\\\"\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" podUID="75189fe6-5b26-4743-b2e2-8e0fee41c653"
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.077650 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" event={"ID":"19fb5b55-7b88-47ff-a4e5-b8995a29db8f","Type":"ContainerStarted","Data":"63a16f65fdddedca4217792591520b15bec40c616e345c6dff44c8f70e19490d"}
Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.079667 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" event={"ID":"8b7b2889-ed1b-45b0-909c-011b3fbee825","Type":"ContainerStarted","Data":"5da23294663a2ee2d39487964d630613983e946837ed4c992133b36ba02c055d"}
Sep 30 20:29:06 crc kubenswrapper[4919]: E0930 20:29:06.079861 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:15d7b5a365350a831ca59d984df67fadeccf89d599e487a7597b105afb82ce4a\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" podUID="19fb5b55-7b88-47ff-a4e5-b8995a29db8f"
\"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884\\\"\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" podUID="8b7b2889-ed1b-45b0-909c-011b3fbee825" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.090674 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd" podStartSLOduration=3.180195543 podStartE2EDuration="26.090658017s" podCreationTimestamp="2025-09-30 20:28:40 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.745165635 +0000 UTC m=+906.861198762" lastFinishedPulling="2025-09-30 20:29:04.655628099 +0000 UTC m=+929.771661236" observedRunningTime="2025-09-30 20:29:06.08592801 +0000 UTC m=+931.201961137" watchObservedRunningTime="2025-09-30 20:29:06.090658017 +0000 UTC m=+931.206691144" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.124557 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" podStartSLOduration=4.033391782 podStartE2EDuration="27.124539638s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.72635688 +0000 UTC m=+906.842390007" lastFinishedPulling="2025-09-30 20:29:04.817504736 +0000 UTC m=+929.933537863" observedRunningTime="2025-09-30 20:29:06.115580119 +0000 UTC m=+931.231613246" watchObservedRunningTime="2025-09-30 20:29:06.124539638 +0000 UTC m=+931.240572765" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.159515 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" podStartSLOduration=6.538142771 podStartE2EDuration="27.1594943s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.65449689 +0000 UTC m=+906.770530017" lastFinishedPulling="2025-09-30 20:29:02.275848419 +0000 UTC m=+927.391881546" observedRunningTime="2025-09-30 20:29:06.149195992 +0000 UTC m=+931.265229139" watchObservedRunningTime="2025-09-30 20:29:06.1594943 +0000 UTC m=+931.275527427" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.205062 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" podStartSLOduration=6.259018049 podStartE2EDuration="27.205043239s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.329645034 +0000 UTC m=+906.445678161" lastFinishedPulling="2025-09-30 20:29:02.275670224 +0000 UTC m=+927.391703351" observedRunningTime="2025-09-30 20:29:06.185701519 +0000 UTC m=+931.301734646" watchObservedRunningTime="2025-09-30 20:29:06.205043239 +0000 UTC m=+931.321076366" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.229443 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" podStartSLOduration=6.299386799 podStartE2EDuration="27.229422495s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.344064022 +0000 UTC m=+906.460097159" lastFinishedPulling="2025-09-30 20:29:02.274099728 +0000 UTC m=+927.390132855" observedRunningTime="2025-09-30 20:29:06.223466392 +0000 UTC m=+931.339499519" watchObservedRunningTime="2025-09-30 20:29:06.229422495 +0000 UTC m=+931.345455622" Sep 
30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.303390 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" podStartSLOduration=8.027419419 podStartE2EDuration="27.303371796s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.335501944 +0000 UTC m=+906.451535071" lastFinishedPulling="2025-09-30 20:29:00.611454321 +0000 UTC m=+925.727487448" observedRunningTime="2025-09-30 20:29:06.30283054 +0000 UTC m=+931.418863667" watchObservedRunningTime="2025-09-30 20:29:06.303371796 +0000 UTC m=+931.419404923" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.331560 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" podStartSLOduration=4.405571137 podStartE2EDuration="27.331543751s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.728638496 +0000 UTC m=+906.844671623" lastFinishedPulling="2025-09-30 20:29:04.65461111 +0000 UTC m=+929.770644237" observedRunningTime="2025-09-30 20:29:06.327813523 +0000 UTC m=+931.443846650" watchObservedRunningTime="2025-09-30 20:29:06.331543751 +0000 UTC m=+931.447576868" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.355780 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" podStartSLOduration=4.437173994 podStartE2EDuration="27.355763293s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.724990741 +0000 UTC m=+906.841023858" lastFinishedPulling="2025-09-30 20:29:04.64358003 +0000 UTC m=+929.759613157" observedRunningTime="2025-09-30 20:29:06.350569852 +0000 UTC m=+931.466602979" watchObservedRunningTime="2025-09-30 20:29:06.355763293 +0000 UTC m=+931.471796420" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.395217 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" podStartSLOduration=4.59665069 podStartE2EDuration="27.395197004s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.747171663 +0000 UTC m=+906.863204790" lastFinishedPulling="2025-09-30 20:29:04.545717977 +0000 UTC m=+929.661751104" observedRunningTime="2025-09-30 20:29:06.390971872 +0000 UTC m=+931.507004999" watchObservedRunningTime="2025-09-30 20:29:06.395197004 +0000 UTC m=+931.511230121" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.422689 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" podStartSLOduration=4.359988599 podStartE2EDuration="27.42267462s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.76468189 +0000 UTC m=+906.880715017" lastFinishedPulling="2025-09-30 20:29:04.827367881 +0000 UTC m=+929.943401038" observedRunningTime="2025-09-30 20:29:06.418920051 +0000 UTC m=+931.534953168" watchObservedRunningTime="2025-09-30 20:29:06.42267462 +0000 UTC m=+931.538707747" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.493630 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" podStartSLOduration=4.697945784 
podStartE2EDuration="27.493611284s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.750060667 +0000 UTC m=+906.866093804" lastFinishedPulling="2025-09-30 20:29:04.545726187 +0000 UTC m=+929.661759304" observedRunningTime="2025-09-30 20:29:06.48898835 +0000 UTC m=+931.605021477" watchObservedRunningTime="2025-09-30 20:29:06.493611284 +0000 UTC m=+931.609644411" Sep 30 20:29:06 crc kubenswrapper[4919]: I0930 20:29:06.508446 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" podStartSLOduration=4.495391999 podStartE2EDuration="27.508432813s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.733014193 +0000 UTC m=+906.849047320" lastFinishedPulling="2025-09-30 20:29:04.746054987 +0000 UTC m=+929.862088134" observedRunningTime="2025-09-30 20:29:06.506431915 +0000 UTC m=+931.622465042" watchObservedRunningTime="2025-09-30 20:29:06.508432813 +0000 UTC m=+931.624465940" Sep 30 20:29:07 crc kubenswrapper[4919]: I0930 20:29:07.089373 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" event={"ID":"f6ccf519-3c56-404b-a649-17f0cda5f592","Type":"ContainerStarted","Data":"16db40ded53416fc93491554d301d9f6c002cb4e940489006cb3aa0cc4050700"} Sep 30 20:29:07 crc kubenswrapper[4919]: E0930 20:29:07.094593 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:bb39758cc8cd0d2cd02841dc81b53fd88647e2db15ee16cdd8c44d4098a942fd\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" podUID="fd8b2eba-9c90-4a16-b470-6e43eaa38f4d" Sep 30 20:29:07 crc kubenswrapper[4919]: E0930 20:29:07.094600 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:057de94f9afa340adc34f9b25f8007d9cd2ba71bc8b5d77aac522add53b7caef\\\"\"" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" podUID="bc2e190b-bcce-456a-938a-4a2cc054a43c" Sep 30 20:29:07 crc kubenswrapper[4919]: E0930 20:29:07.094668 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884\\\"\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" podUID="8b7b2889-ed1b-45b0-909c-011b3fbee825" Sep 30 20:29:07 crc kubenswrapper[4919]: E0930 20:29:07.094686 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:f6b935f67979298c3c263ad84d277e5cf26c0dbba3f85f255c1ec4d1d75241d2\\\"\"" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" podUID="75189fe6-5b26-4743-b2e2-8e0fee41c653" Sep 30 20:29:07 crc kubenswrapper[4919]: E0930 20:29:07.094734 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:15d7b5a365350a831ca59d984df67fadeccf89d599e487a7597b105afb82ce4a\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" podUID="19fb5b55-7b88-47ff-a4e5-b8995a29db8f" Sep 30 20:29:07 crc kubenswrapper[4919]: E0930 20:29:07.094749 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:4d08afd31dc5ded10c54a5541f514ac351e9b40a183285b3db27d0757a6354c8\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" podUID="b1d4f4a6-d94c-4b73-8f95-9378547c5453" Sep 30 20:29:07 crc kubenswrapper[4919]: E0930 20:29:07.096092 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" podUID="469c99b8-4171-48c7-9091-fbab0c200c11" Sep 30 20:29:07 crc kubenswrapper[4919]: E0930 20:29:07.096458 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:21792a2317c0a55e40b2a02a7d5d4682b76538ed2a2e0633199aa395e60ecc72\\\"\"" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" podUID="1fbeecba-9bf8-44ef-819b-63bcf26ce691" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.129319 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-57rlr" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.213997 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-zf7wn" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.244002 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-k95j5" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.267308 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-xlw57" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.274977 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-fb4cc5b89-ktj4d" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.312052 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-sqjrr" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.355352 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-9d6c5db85-v96qz" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.472702 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-qrxkz" Sep 30 20:29:10 crc kubenswrapper[4919]: I0930 20:29:10.503332 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/watcher-operator-controller-manager-76669f99c-kdb2m" Sep 30 20:29:19 crc kubenswrapper[4919]: I0930 20:29:19.181844 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" event={"ID":"75189fe6-5b26-4743-b2e2-8e0fee41c653","Type":"ContainerStarted","Data":"ea468e7e83d27bad6411530f6c38067aa2adea262635657380b17b488f8ca9d9"} Sep 30 20:29:19 crc kubenswrapper[4919]: I0930 20:29:19.184558 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" event={"ID":"fd8b2eba-9c90-4a16-b470-6e43eaa38f4d","Type":"ContainerStarted","Data":"f1a2961fc0569ce491da01305e2a80c0d00b5f33831dd7270f79bed59505b3db"} Sep 30 20:29:19 crc kubenswrapper[4919]: I0930 20:29:19.605113 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-7vfvh" Sep 30 20:29:19 crc kubenswrapper[4919]: I0930 20:29:19.709303 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-5dtjx" Sep 30 20:29:19 crc kubenswrapper[4919]: I0930 20:29:19.732411 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-w2t4d" Sep 30 20:29:19 crc kubenswrapper[4919]: I0930 20:29:19.858669 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-2w4vp" Sep 30 20:29:20 crc kubenswrapper[4919]: I0930 20:29:20.194708 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" event={"ID":"b1d4f4a6-d94c-4b73-8f95-9378547c5453","Type":"ContainerStarted","Data":"30044acd31a467ac768fe63bbc8c60e2af897110b20ec44fbd0744ac7c0a4c13"} Sep 30 20:29:20 crc kubenswrapper[4919]: I0930 20:29:20.194850 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" Sep 30 20:29:20 crc kubenswrapper[4919]: I0930 20:29:20.194988 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" Sep 30 20:29:20 crc kubenswrapper[4919]: I0930 20:29:20.228191 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk" podStartSLOduration=3.903390463 podStartE2EDuration="41.228158481s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:40.949341958 +0000 UTC m=+906.065375085" lastFinishedPulling="2025-09-30 20:29:18.274109956 +0000 UTC m=+943.390143103" observedRunningTime="2025-09-30 20:29:20.224412252 +0000 UTC m=+945.340445389" watchObservedRunningTime="2025-09-30 20:29:20.228158481 +0000 UTC m=+945.344191608" Sep 30 20:29:20 crc kubenswrapper[4919]: I0930 20:29:20.247396 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" podStartSLOduration=3.189257223 podStartE2EDuration="41.247367067s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.42483265 +0000 UTC m=+906.540865777" lastFinishedPulling="2025-09-30 20:29:19.482942484 +0000 UTC 
Sep 30 20:29:20 crc kubenswrapper[4919]: I0930 20:29:20.247396 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" podStartSLOduration=3.189257223 podStartE2EDuration="41.247367067s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.42483265 +0000 UTC m=+906.540865777" lastFinishedPulling="2025-09-30 20:29:19.482942484 +0000 UTC m=+944.598975621" observedRunningTime="2025-09-30 20:29:20.240309002 +0000 UTC m=+945.356342129" watchObservedRunningTime="2025-09-30 20:29:20.247367067 +0000 UTC m=+945.363400224"
Sep 30 20:29:20 crc kubenswrapper[4919]: I0930 20:29:20.280112 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8" podStartSLOduration=3.971130025 podStartE2EDuration="41.280092854s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:40.967567397 +0000 UTC m=+906.083600524" lastFinishedPulling="2025-09-30 20:29:18.276530216 +0000 UTC m=+943.392563353" observedRunningTime="2025-09-30 20:29:20.26681053 +0000 UTC m=+945.382843667" watchObservedRunningTime="2025-09-30 20:29:20.280092854 +0000 UTC m=+945.396125991"
Sep 30 20:29:21 crc kubenswrapper[4919]: I0930 20:29:21.209587 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" event={"ID":"469c99b8-4171-48c7-9091-fbab0c200c11","Type":"ContainerStarted","Data":"1e65f7c7bf26c810b1759a745bae40c9c7e41ed36967e4ed60dd34e6054b235e"}
Sep 30 20:29:21 crc kubenswrapper[4919]: I0930 20:29:21.209990 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr"
Sep 30 20:29:21 crc kubenswrapper[4919]: I0930 20:29:21.230442 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr" podStartSLOduration=3.396526014 podStartE2EDuration="42.230420929s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.335290078 +0000 UTC m=+906.451323195" lastFinishedPulling="2025-09-30 20:29:20.169184973 +0000 UTC m=+945.285218110" observedRunningTime="2025-09-30 20:29:21.2273585 +0000 UTC m=+946.343391627" watchObservedRunningTime="2025-09-30 20:29:21.230420929 +0000 UTC m=+946.346454066"
Sep 30 20:29:22 crc kubenswrapper[4919]: I0930 20:29:22.221695 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" event={"ID":"8b7b2889-ed1b-45b0-909c-011b3fbee825","Type":"ContainerStarted","Data":"419e1a1a465be3c96517e2f3e8339b1ab3d88f9d340132c693126396c9fb49e1"}
Sep 30 20:29:22 crc kubenswrapper[4919]: I0930 20:29:22.221914 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb"
Sep 30 20:29:22 crc kubenswrapper[4919]: I0930 20:29:22.225584 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" event={"ID":"1fbeecba-9bf8-44ef-819b-63bcf26ce691","Type":"ContainerStarted","Data":"796a6233713f876b4a85cf37281c912881839d6614886d72ae557ee55bd29ab2"}
Sep 30 20:29:22 crc kubenswrapper[4919]: I0930 20:29:22.225846 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295"
Sep 30 20:29:22 crc kubenswrapper[4919]: I0930 20:29:22.246177 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" podStartSLOduration=3.363342971 podStartE2EDuration="43.246154496s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.421825253 +0000 UTC m=+906.537858380" lastFinishedPulling="2025-09-30 20:29:21.304636778 +0000 UTC m=+946.420669905" observedRunningTime="2025-09-30 20:29:22.243122528 +0000 UTC m=+947.359155665" watchObservedRunningTime="2025-09-30 20:29:22.246154496 +0000 UTC m=+947.362187663"
Sep 30 20:29:22 crc kubenswrapper[4919]: I0930 20:29:22.268518 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295" podStartSLOduration=3.382588678 podStartE2EDuration="43.268498853s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.319184181 +0000 UTC m=+906.435217308" lastFinishedPulling="2025-09-30 20:29:21.205094356 +0000 UTC m=+946.321127483" observedRunningTime="2025-09-30 20:29:22.262545141 +0000 UTC m=+947.378578278" watchObservedRunningTime="2025-09-30 20:29:22.268498853 +0000 UTC m=+947.384531980"
Sep 30 20:29:23 crc kubenswrapper[4919]: I0930 20:29:23.235976 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" event={"ID":"19fb5b55-7b88-47ff-a4e5-b8995a29db8f","Type":"ContainerStarted","Data":"a864497f48510fed58fe405b1f468a246b6a19e326e95c9174109fd15ec768d6"}
Sep 30 20:29:23 crc kubenswrapper[4919]: I0930 20:29:23.236624 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w"
Sep 30 20:29:23 crc kubenswrapper[4919]: I0930 20:29:23.243672 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" event={"ID":"bc2e190b-bcce-456a-938a-4a2cc054a43c","Type":"ContainerStarted","Data":"388f805d8719ede63914054bb68891352b7d7c0e4470b7d31c312f9bd70285b5"}
Sep 30 20:29:23 crc kubenswrapper[4919]: I0930 20:29:23.288627 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" podStartSLOduration=3.348875433 podStartE2EDuration="44.288609808s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.379378974 +0000 UTC m=+906.495412101" lastFinishedPulling="2025-09-30 20:29:22.319113309 +0000 UTC m=+947.435146476" observedRunningTime="2025-09-30 20:29:23.266745275 +0000 UTC m=+948.382778412" watchObservedRunningTime="2025-09-30 20:29:23.288609808 +0000 UTC m=+948.404642935"
Sep 30 20:29:23 crc kubenswrapper[4919]: I0930 20:29:23.289015 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" podStartSLOduration=3.518648709 podStartE2EDuration="44.28901125s" podCreationTimestamp="2025-09-30 20:28:39 +0000 UTC" firstStartedPulling="2025-09-30 20:28:41.33880589 +0000 UTC m=+906.454839017" lastFinishedPulling="2025-09-30 20:29:22.109168411 +0000 UTC m=+947.225201558" observedRunningTime="2025-09-30 20:29:23.282175062 +0000 UTC m=+948.398208189" watchObservedRunningTime="2025-09-30 20:29:23.28901125 +0000 UTC m=+948.405044377"
Sep 30 20:29:26 crc kubenswrapper[4919]: I0930 20:29:26.062528 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:29:26 crc kubenswrapper[4919]: I0930 20:29:26.064712 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:29:26 crc kubenswrapper[4919]: I0930 20:29:26.065014 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6"
Sep 30 20:29:26 crc kubenswrapper[4919]: I0930 20:29:26.066005 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7330287e87c2c36810a07467a4c3caedfb96311988e76c64c3eedda691a5f9f5"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 20:29:26 crc kubenswrapper[4919]: I0930 20:29:26.066099 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://7330287e87c2c36810a07467a4c3caedfb96311988e76c64c3eedda691a5f9f5" gracePeriod=600
Sep 30 20:29:26 crc kubenswrapper[4919]: I0930 20:29:26.272908 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="7330287e87c2c36810a07467a4c3caedfb96311988e76c64c3eedda691a5f9f5" exitCode=0
Sep 30 20:29:26 crc kubenswrapper[4919]: I0930 20:29:26.272953 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"7330287e87c2c36810a07467a4c3caedfb96311988e76c64c3eedda691a5f9f5"}
Sep 30 20:29:26 crc kubenswrapper[4919]: I0930 20:29:26.272983 4919 scope.go:117] "RemoveContainer" containerID="233411e098bbdd508df400a23be94bf9227b0271eb6d0d9c0dd1c95d19986660"
Sep 30 20:29:27 crc kubenswrapper[4919]: I0930 20:29:27.284036 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"266a47211086852ebceb8347506c7f46056112506f6f3e1b6a4412456d9a3ed6"}
Sep 30 20:29:29 crc kubenswrapper[4919]: I0930 20:29:29.593995 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-nfpc8"
Sep 30 20:29:29 crc kubenswrapper[4919]: I0930 20:29:29.620818 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k6295"
Sep 30 20:29:29 crc kubenswrapper[4919]: I0930 20:29:29.708767 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-mjllk"
Sep 30 20:29:30 crc kubenswrapper[4919]: I0930 20:29:30.002960 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-p9rsr"
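The machine-config-daemon restart above shows the liveness-probe path end to end: the prober gets connection refused on 127.0.0.1:8798/health, the kubelet marks the container unhealthy, kills it with a 600s grace period, and starts a replacement. Below is a minimal sketch of the health endpoint such a probe expects; the port and path mirror the probe target, and the failure behavior described in the comments assumes the default failureThreshold of 3 since the pod's probe spec is not shown in this log.

// health.go: a minimal /health endpoint of the kind the probe polls.
package main

import (
	"log"
	"net/http"
)

func main() {
	http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) {
		// Any status in the 200-399 range counts as a probe success;
		// if nothing listens on the port at all, the kubelet sees
		// "connect: connection refused" as in the log above, and after
		// failureThreshold consecutive failures restarts the container.
		w.WriteHeader(http.StatusOK)
		w.Write([]byte("ok"))
	})
	log.Fatal(http.ListenAndServe("127.0.0.1:8798", nil))
}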
pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-2mrbb" Sep 30 20:29:30 crc kubenswrapper[4919]: I0930 20:29:30.142862 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-d2k8w" Sep 30 20:29:30 crc kubenswrapper[4919]: I0930 20:29:30.158666 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" Sep 30 20:29:30 crc kubenswrapper[4919]: I0930 20:29:30.164560 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-5sgrl" Sep 30 20:29:30 crc kubenswrapper[4919]: I0930 20:29:30.185639 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" Sep 30 20:29:30 crc kubenswrapper[4919]: I0930 20:29:30.193416 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-56jpm" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.849847 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-psgk6"] Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.851970 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.853319 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwflc\" (UniqueName: \"kubernetes.io/projected/b216f695-675e-4e29-9b4e-701b29fb8c3e-kube-api-access-zwflc\") pod \"dnsmasq-dns-675f4bcbfc-psgk6\" (UID: \"b216f695-675e-4e29-9b4e-701b29fb8c3e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.853382 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b216f695-675e-4e29-9b4e-701b29fb8c3e-config\") pod \"dnsmasq-dns-675f4bcbfc-psgk6\" (UID: \"b216f695-675e-4e29-9b4e-701b29fb8c3e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.854501 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.854712 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.857672 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-f2nrj" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.857866 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.865126 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-psgk6"] Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.923624 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-glwsx"] Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.924716 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.927119 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.934788 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-glwsx"] Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.955854 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwflc\" (UniqueName: \"kubernetes.io/projected/b216f695-675e-4e29-9b4e-701b29fb8c3e-kube-api-access-zwflc\") pod \"dnsmasq-dns-675f4bcbfc-psgk6\" (UID: \"b216f695-675e-4e29-9b4e-701b29fb8c3e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.955939 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b216f695-675e-4e29-9b4e-701b29fb8c3e-config\") pod \"dnsmasq-dns-675f4bcbfc-psgk6\" (UID: \"b216f695-675e-4e29-9b4e-701b29fb8c3e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.957092 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b216f695-675e-4e29-9b4e-701b29fb8c3e-config\") pod \"dnsmasq-dns-675f4bcbfc-psgk6\" (UID: \"b216f695-675e-4e29-9b4e-701b29fb8c3e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:29:48 crc kubenswrapper[4919]: I0930 20:29:48.990252 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwflc\" (UniqueName: \"kubernetes.io/projected/b216f695-675e-4e29-9b4e-701b29fb8c3e-kube-api-access-zwflc\") pod \"dnsmasq-dns-675f4bcbfc-psgk6\" (UID: \"b216f695-675e-4e29-9b4e-701b29fb8c3e\") " pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.058131 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55zpk\" (UniqueName: \"kubernetes.io/projected/a5ca12f5-90c7-48ad-a05a-8f03214d928f-kube-api-access-55zpk\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.058181 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.058395 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-config\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.159858 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-config\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 
20:29:49.159951 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55zpk\" (UniqueName: \"kubernetes.io/projected/a5ca12f5-90c7-48ad-a05a-8f03214d928f-kube-api-access-55zpk\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.159975 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.160844 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.160848 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-config\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.170636 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.179267 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55zpk\" (UniqueName: \"kubernetes.io/projected/a5ca12f5-90c7-48ad-a05a-8f03214d928f-kube-api-access-55zpk\") pod \"dnsmasq-dns-78dd6ddcc-glwsx\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.239000 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.599452 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-psgk6"] Sep 30 20:29:49 crc kubenswrapper[4919]: I0930 20:29:49.676349 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-glwsx"] Sep 30 20:29:49 crc kubenswrapper[4919]: W0930 20:29:49.680248 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5ca12f5_90c7_48ad_a05a_8f03214d928f.slice/crio-05740f74455be57a3c7ac7af10a447ed0b13639bac02e8d48d295df7407bd450 WatchSource:0}: Error finding container 05740f74455be57a3c7ac7af10a447ed0b13639bac02e8d48d295df7407bd450: Status 404 returned error can't find the container with id 05740f74455be57a3c7ac7af10a447ed0b13639bac02e8d48d295df7407bd450 Sep 30 20:29:50 crc kubenswrapper[4919]: I0930 20:29:50.491607 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" event={"ID":"a5ca12f5-90c7-48ad-a05a-8f03214d928f","Type":"ContainerStarted","Data":"05740f74455be57a3c7ac7af10a447ed0b13639bac02e8d48d295df7407bd450"} Sep 30 20:29:50 crc kubenswrapper[4919]: I0930 20:29:50.495596 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" event={"ID":"b216f695-675e-4e29-9b4e-701b29fb8c3e","Type":"ContainerStarted","Data":"6610bdc81341b87afe8f2c4c8d47b32929df320aa03967264935dc8e0ae9ea83"} Sep 30 20:29:50 crc kubenswrapper[4919]: I0930 20:29:50.913677 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-psgk6"] Sep 30 20:29:50 crc kubenswrapper[4919]: I0930 20:29:50.941421 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm2m4"] Sep 30 20:29:50 crc kubenswrapper[4919]: I0930 20:29:50.942974 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:50 crc kubenswrapper[4919]: I0930 20:29:50.951460 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm2m4"] Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.095042 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.095133 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfvcs\" (UniqueName: \"kubernetes.io/projected/f1309388-8cc1-45d3-9d03-da1049ea176f-kube-api-access-mfvcs\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.095167 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-config\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.177476 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-glwsx"] Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.197266 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.197349 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfvcs\" (UniqueName: \"kubernetes.io/projected/f1309388-8cc1-45d3-9d03-da1049ea176f-kube-api-access-mfvcs\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.197382 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-config\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.198522 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-config\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.199028 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.211375 
4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-88rpf"] Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.212490 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.222547 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-88rpf"] Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.226185 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfvcs\" (UniqueName: \"kubernetes.io/projected/f1309388-8cc1-45d3-9d03-da1049ea176f-kube-api-access-mfvcs\") pod \"dnsmasq-dns-666b6646f7-zm2m4\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.264942 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.301653 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2f4f\" (UniqueName: \"kubernetes.io/projected/8f73ed0f-186b-45f3-9776-ec31c69a5d56-kube-api-access-n2f4f\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.301750 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.301777 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-config\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.404892 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2f4f\" (UniqueName: \"kubernetes.io/projected/8f73ed0f-186b-45f3-9776-ec31c69a5d56-kube-api-access-n2f4f\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.405003 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.405025 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-config\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.406201 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-config\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.406986 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.444400 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2f4f\" (UniqueName: \"kubernetes.io/projected/8f73ed0f-186b-45f3-9776-ec31c69a5d56-kube-api-access-n2f4f\") pod \"dnsmasq-dns-57d769cc4f-88rpf\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.567433 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:29:51 crc kubenswrapper[4919]: I0930 20:29:51.793786 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm2m4"] Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.062167 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-88rpf"] Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.074139 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.076061 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.081347 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.082673 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.083446 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.083537 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.083644 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rb9r4" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.083706 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.085765 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Sep 30 20:29:52 crc kubenswrapper[4919]: W0930 20:29:52.086309 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f73ed0f_186b_45f3_9776_ec31c69a5d56.slice/crio-a91862118572424a4f61d9ce677fe0eaaf9e63caa27e4e0127688885a4b02801 WatchSource:0}: Error finding container a91862118572424a4f61d9ce677fe0eaaf9e63caa27e4e0127688885a4b02801: Status 404 returned error can't find the container with id a91862118572424a4f61d9ce677fe0eaaf9e63caa27e4e0127688885a4b02801 Sep 30 20:29:52 crc 
kubenswrapper[4919]: I0930 20:29:52.090419 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225266 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225600 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225628 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/831f0cec-e526-41e4-851f-139ffef9bea5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225666 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb8xm\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-kube-api-access-tb8xm\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225685 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225707 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225723 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/831f0cec-e526-41e4-851f-139ffef9bea5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225757 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225782 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225798 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-config-data\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.225832 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328226 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/831f0cec-e526-41e4-851f-139ffef9bea5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328289 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb8xm\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-kube-api-access-tb8xm\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328307 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328328 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328345 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/831f0cec-e526-41e4-851f-139ffef9bea5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328365 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328386 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328404 4919 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-config-data\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328436 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328459 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328478 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.328901 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.329519 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.329721 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-config-data\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.329880 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.330299 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.330883 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 
20:29:52.341194 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.342461 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/831f0cec-e526-41e4-851f-139ffef9bea5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.347184 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/831f0cec-e526-41e4-851f-139ffef9bea5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.352763 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.374070 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.382600 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.385297 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.388815 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.388815 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.389000 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.389157 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-8dgqf" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.389281 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.392089 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.392311 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.398190 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb8xm\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-kube-api-access-tb8xm\") pod \"rabbitmq-server-0\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") " pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.406287 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.415758 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.524759 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" event={"ID":"f1309388-8cc1-45d3-9d03-da1049ea176f","Type":"ContainerStarted","Data":"ab1c1e5e28dee287aa03df2ea83a9f08ac147834954116b11bb71e0c129bfe7e"} Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.528042 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" event={"ID":"8f73ed0f-186b-45f3-9776-ec31c69a5d56","Type":"ContainerStarted","Data":"a91862118572424a4f61d9ce677fe0eaaf9e63caa27e4e0127688885a4b02801"} Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.531150 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.531194 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/567de3cf-1a4f-426d-b4d5-da78ead6e923-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.531246 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-lszll\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-kube-api-access-lszll\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.531272 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.531293 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.531340 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.531478 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.531528 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.533379 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.533427 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.533454 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/567de3cf-1a4f-426d-b4d5-da78ead6e923-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634143 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634472 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634499 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634516 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634561 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634581 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634596 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/567de3cf-1a4f-426d-b4d5-da78ead6e923-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634624 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634647 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/567de3cf-1a4f-426d-b4d5-da78ead6e923-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634674 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lszll\" (UniqueName: 
\"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-kube-api-access-lszll\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634686 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634694 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.634690 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.635325 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.635565 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.637056 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.637189 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.642984 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.643490 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/567de3cf-1a4f-426d-b4d5-da78ead6e923-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " 
pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.644742 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/567de3cf-1a4f-426d-b4d5-da78ead6e923-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.645619 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.654202 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lszll\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-kube-api-access-lszll\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.672549 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.787198 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:29:52 crc kubenswrapper[4919]: I0930 20:29:52.953665 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:29:52 crc kubenswrapper[4919]: W0930 20:29:52.969356 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod831f0cec_e526_41e4_851f_139ffef9bea5.slice/crio-fc485ce0ed183b024edf582d94bc1179466f05349f5295c49f9e92450845c274 WatchSource:0}: Error finding container fc485ce0ed183b024edf582d94bc1179466f05349f5295c49f9e92450845c274: Status 404 returned error can't find the container with id fc485ce0ed183b024edf582d94bc1179466f05349f5295c49f9e92450845c274 Sep 30 20:29:53 crc kubenswrapper[4919]: I0930 20:29:53.309604 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:29:53 crc kubenswrapper[4919]: I0930 20:29:53.555773 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"831f0cec-e526-41e4-851f-139ffef9bea5","Type":"ContainerStarted","Data":"fc485ce0ed183b024edf582d94bc1179466f05349f5295c49f9e92450845c274"} Sep 30 20:29:53 crc kubenswrapper[4919]: I0930 20:29:53.557603 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"567de3cf-1a4f-426d-b4d5-da78ead6e923","Type":"ContainerStarted","Data":"2cd648d520800c8309b5eafea58661f67c11f60b84c6ae967651d23bc59e6825"} Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.825268 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.827093 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.835260 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.835318 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.835515 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.835965 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.841364 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.841759 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-hs879" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.842512 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978742 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978786 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978836 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978855 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978871 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frt9j\" (UniqueName: \"kubernetes.io/projected/f35330fc-f5b9-461f-801e-9ae42bd20866-kube-api-access-frt9j\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978899 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f35330fc-f5b9-461f-801e-9ae42bd20866-config-data-generated\") pod 
\"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978914 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978951 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:54 crc kubenswrapper[4919]: I0930 20:29:54.978965 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080325 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080359 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080411 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080434 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080450 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frt9j\" (UniqueName: \"kubernetes.io/projected/f35330fc-f5b9-461f-801e-9ae42bd20866-kube-api-access-frt9j\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080478 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f35330fc-f5b9-461f-801e-9ae42bd20866-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080494 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080530 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.080546 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.081404 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.082048 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.082291 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f35330fc-f5b9-461f-801e-9ae42bd20866-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.083363 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.083783 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f35330fc-f5b9-461f-801e-9ae42bd20866-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.087843 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.088194 4919 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.089764 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f35330fc-f5b9-461f-801e-9ae42bd20866-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.096594 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frt9j\" (UniqueName: \"kubernetes.io/projected/f35330fc-f5b9-461f-801e-9ae42bd20866-kube-api-access-frt9j\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.102071 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f35330fc-f5b9-461f-801e-9ae42bd20866\") " pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.178978 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.180258 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.181754 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.182558 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.182579 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-mrh9z" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.187488 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.191700 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.205001 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282369 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282668 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-kolla-config\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282695 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trcxs\" (UniqueName: \"kubernetes.io/projected/d5e405b0-b5a3-4313-8fd2-b592b38e5926-kube-api-access-trcxs\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282714 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282752 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-config-data-default\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282789 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282827 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282854 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d5e405b0-b5a3-4313-8fd2-b592b38e5926-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.282880 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: 
\"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-secrets\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.389556 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-secrets\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.389821 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.389872 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-kolla-config\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.389898 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trcxs\" (UniqueName: \"kubernetes.io/projected/d5e405b0-b5a3-4313-8fd2-b592b38e5926-kube-api-access-trcxs\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.390024 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.390112 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-config-data-default\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.390141 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.391927 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-kolla-config\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.392856 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-config-data-default\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.393850 
4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.394903 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.394946 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d5e405b0-b5a3-4313-8fd2-b592b38e5926-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.395732 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d5e405b0-b5a3-4313-8fd2-b592b38e5926-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.397623 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d5e405b0-b5a3-4313-8fd2-b592b38e5926-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.418234 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.418282 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.420620 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/d5e405b0-b5a3-4313-8fd2-b592b38e5926-secrets\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.423651 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") " pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.424366 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trcxs\" (UniqueName: \"kubernetes.io/projected/d5e405b0-b5a3-4313-8fd2-b592b38e5926-kube-api-access-trcxs\") pod \"openstack-galera-0\" (UID: \"d5e405b0-b5a3-4313-8fd2-b592b38e5926\") 
" pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.507120 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.578850 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.579778 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.583169 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.583424 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.583528 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-zcm8w" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.599680 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.699692 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5483de2-8939-4696-969b-efa0a56de229-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.699747 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5483de2-8939-4696-969b-efa0a56de229-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.699788 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5483de2-8939-4696-969b-efa0a56de229-kolla-config\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.699827 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzdgt\" (UniqueName: \"kubernetes.io/projected/f5483de2-8939-4696-969b-efa0a56de229-kube-api-access-gzdgt\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.699851 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5483de2-8939-4696-969b-efa0a56de229-config-data\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.801774 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5483de2-8939-4696-969b-efa0a56de229-kolla-config\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.801821 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-gzdgt\" (UniqueName: \"kubernetes.io/projected/f5483de2-8939-4696-969b-efa0a56de229-kube-api-access-gzdgt\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.801839 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5483de2-8939-4696-969b-efa0a56de229-config-data\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.802013 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5483de2-8939-4696-969b-efa0a56de229-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.802033 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5483de2-8939-4696-969b-efa0a56de229-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.802606 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f5483de2-8939-4696-969b-efa0a56de229-kolla-config\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.804386 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5483de2-8939-4696-969b-efa0a56de229-config-data\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.825626 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5483de2-8939-4696-969b-efa0a56de229-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.825704 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5483de2-8939-4696-969b-efa0a56de229-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.832472 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzdgt\" (UniqueName: \"kubernetes.io/projected/f5483de2-8939-4696-969b-efa0a56de229-kube-api-access-gzdgt\") pod \"memcached-0\" (UID: \"f5483de2-8939-4696-969b-efa0a56de229\") " pod="openstack/memcached-0" Sep 30 20:29:55 crc kubenswrapper[4919]: I0930 20:29:55.920301 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 30 20:29:57 crc kubenswrapper[4919]: I0930 20:29:57.415230 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:29:57 crc kubenswrapper[4919]: I0930 20:29:57.417505 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:29:57 crc kubenswrapper[4919]: I0930 20:29:57.420090 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-2wz98" Sep 30 20:29:57 crc kubenswrapper[4919]: I0930 20:29:57.427409 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:29:57 crc kubenswrapper[4919]: I0930 20:29:57.534130 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-768b9\" (UniqueName: \"kubernetes.io/projected/3643ae76-bfa6-4d35-94ad-fedfa85b1977-kube-api-access-768b9\") pod \"kube-state-metrics-0\" (UID: \"3643ae76-bfa6-4d35-94ad-fedfa85b1977\") " pod="openstack/kube-state-metrics-0" Sep 30 20:29:57 crc kubenswrapper[4919]: I0930 20:29:57.635598 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-768b9\" (UniqueName: \"kubernetes.io/projected/3643ae76-bfa6-4d35-94ad-fedfa85b1977-kube-api-access-768b9\") pod \"kube-state-metrics-0\" (UID: \"3643ae76-bfa6-4d35-94ad-fedfa85b1977\") " pod="openstack/kube-state-metrics-0" Sep 30 20:29:57 crc kubenswrapper[4919]: I0930 20:29:57.655490 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-768b9\" (UniqueName: \"kubernetes.io/projected/3643ae76-bfa6-4d35-94ad-fedfa85b1977-kube-api-access-768b9\") pod \"kube-state-metrics-0\" (UID: \"3643ae76-bfa6-4d35-94ad-fedfa85b1977\") " pod="openstack/kube-state-metrics-0" Sep 30 20:29:57 crc kubenswrapper[4919]: I0930 20:29:57.784235 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.134374 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h"] Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.135989 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.137600 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.137803 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.159823 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h"] Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.279830 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-config-volume\") pod \"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.279917 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4q9v\" (UniqueName: \"kubernetes.io/projected/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-kube-api-access-g4q9v\") pod \"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.279968 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-secret-volume\") pod \"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.381544 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-config-volume\") pod \"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.381882 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4q9v\" (UniqueName: \"kubernetes.io/projected/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-kube-api-access-g4q9v\") pod \"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.382007 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-secret-volume\") pod \"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.382270 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-config-volume\") pod 
\"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.387201 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-secret-volume\") pod \"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.395855 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4q9v\" (UniqueName: \"kubernetes.io/projected/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-kube-api-access-g4q9v\") pod \"collect-profiles-29321070-lz75h\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.461651 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.902565 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jx7tr"] Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.903609 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.905541 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-qq66q" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.905882 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.906031 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.914682 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-ppjcf"] Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.916717 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.920888 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jx7tr"] Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.983443 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ppjcf"] Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994036 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-lib\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994095 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-run-ovn\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994126 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/52b6421a-9e6a-490d-9940-b2931f34aae1-scripts\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994143 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b4d1dc0-4d24-4128-a83b-9f37e7356309-combined-ca-bundle\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994162 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-etc-ovs\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994186 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-log\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994203 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b4d1dc0-4d24-4128-a83b-9f37e7356309-ovn-controller-tls-certs\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994243 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-run\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 
20:30:00.994270 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b4d1dc0-4d24-4128-a83b-9f37e7356309-scripts\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994290 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmqpj\" (UniqueName: \"kubernetes.io/projected/8b4d1dc0-4d24-4128-a83b-9f37e7356309-kube-api-access-zmqpj\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994311 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw8hg\" (UniqueName: \"kubernetes.io/projected/52b6421a-9e6a-490d-9940-b2931f34aae1-kube-api-access-cw8hg\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994329 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-log-ovn\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:00 crc kubenswrapper[4919]: I0930 20:30:00.994367 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-run\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096340 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw8hg\" (UniqueName: \"kubernetes.io/projected/52b6421a-9e6a-490d-9940-b2931f34aae1-kube-api-access-cw8hg\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096399 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-log-ovn\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096465 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-run\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096501 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-lib\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096535 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-run-ovn\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096572 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/52b6421a-9e6a-490d-9940-b2931f34aae1-scripts\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096597 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b4d1dc0-4d24-4128-a83b-9f37e7356309-combined-ca-bundle\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096620 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-etc-ovs\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096646 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-log\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096667 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b4d1dc0-4d24-4128-a83b-9f37e7356309-ovn-controller-tls-certs\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096705 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-run\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096744 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b4d1dc0-4d24-4128-a83b-9f37e7356309-scripts\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.096777 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmqpj\" (UniqueName: \"kubernetes.io/projected/8b4d1dc0-4d24-4128-a83b-9f37e7356309-kube-api-access-zmqpj\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.097624 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-run\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " 
pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.099109 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b4d1dc0-4d24-4128-a83b-9f37e7356309-scripts\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.099133 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/52b6421a-9e6a-490d-9940-b2931f34aae1-scripts\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.099299 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-lib\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.100336 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b4d1dc0-4d24-4128-a83b-9f37e7356309-combined-ca-bundle\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.100966 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b4d1dc0-4d24-4128-a83b-9f37e7356309-ovn-controller-tls-certs\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.103381 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-log-ovn\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.111688 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-log\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.112415 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-var-run\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.112419 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/52b6421a-9e6a-490d-9940-b2931f34aae1-etc-ovs\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.112649 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/8b4d1dc0-4d24-4128-a83b-9f37e7356309-var-run-ovn\") pod 
\"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.114712 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmqpj\" (UniqueName: \"kubernetes.io/projected/8b4d1dc0-4d24-4128-a83b-9f37e7356309-kube-api-access-zmqpj\") pod \"ovn-controller-jx7tr\" (UID: \"8b4d1dc0-4d24-4128-a83b-9f37e7356309\") " pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.115112 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw8hg\" (UniqueName: \"kubernetes.io/projected/52b6421a-9e6a-490d-9940-b2931f34aae1-kube-api-access-cw8hg\") pod \"ovn-controller-ovs-ppjcf\" (UID: \"52b6421a-9e6a-490d-9940-b2931f34aae1\") " pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.289517 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.303436 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.350717 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.352199 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.354128 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.355157 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.355167 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-5gd5k" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.355668 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.355738 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.389113 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.400611 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.400672 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87cf8edf-c133-4a62-939f-72dc079db17e-config\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.400716 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.400765 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.400794 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87cf8edf-c133-4a62-939f-72dc079db17e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.400822 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9xf7\" (UniqueName: \"kubernetes.io/projected/87cf8edf-c133-4a62-939f-72dc079db17e-kube-api-access-v9xf7\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.400887 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87cf8edf-c133-4a62-939f-72dc079db17e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.400914 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.502082 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87cf8edf-c133-4a62-939f-72dc079db17e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.502125 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.502163 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.502195 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87cf8edf-c133-4a62-939f-72dc079db17e-config\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 
20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.502240 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.502279 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.502307 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87cf8edf-c133-4a62-939f-72dc079db17e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.502333 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9xf7\" (UniqueName: \"kubernetes.io/projected/87cf8edf-c133-4a62-939f-72dc079db17e-kube-api-access-v9xf7\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.503073 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.503296 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/87cf8edf-c133-4a62-939f-72dc079db17e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.503766 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87cf8edf-c133-4a62-939f-72dc079db17e-config\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.503962 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87cf8edf-c133-4a62-939f-72dc079db17e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.507678 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.507769 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: 
\"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.508134 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/87cf8edf-c133-4a62-939f-72dc079db17e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.529986 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9xf7\" (UniqueName: \"kubernetes.io/projected/87cf8edf-c133-4a62-939f-72dc079db17e-kube-api-access-v9xf7\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.540690 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-nb-0\" (UID: \"87cf8edf-c133-4a62-939f-72dc079db17e\") " pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:01 crc kubenswrapper[4919]: I0930 20:30:01.681918 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.394075 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.396111 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.399046 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.400078 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.400229 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-mh4rt" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.400981 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.405473 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.479236 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.479553 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.479596 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.479658 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.479773 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-config\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.480010 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6xsz\" (UniqueName: \"kubernetes.io/projected/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-kube-api-access-j6xsz\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.480077 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.480130 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.582325 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.582516 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.583359 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-config\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.583529 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6xsz\" (UniqueName: \"kubernetes.io/projected/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-kube-api-access-j6xsz\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 
20:30:05.583562 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.583598 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.583992 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-config\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.584111 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.584199 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.584285 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.585166 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.585209 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.588757 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.592447 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 
crc kubenswrapper[4919]: I0930 20:30:05.592816 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.619538 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6xsz\" (UniqueName: \"kubernetes.io/projected/5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3-kube-api-access-j6xsz\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.622454 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"ovsdbserver-sb-0\" (UID: \"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3\") " pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:05 crc kubenswrapper[4919]: I0930 20:30:05.716495 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:06 crc kubenswrapper[4919]: E0930 20:30:06.668983 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Sep 30 20:30:06 crc kubenswrapper[4919]: E0930 20:30:06.670551 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tb8xm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(831f0cec-e526-41e4-851f-139ffef9bea5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:30:06 crc kubenswrapper[4919]: E0930 20:30:06.671888 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" Sep 30 20:30:07 crc kubenswrapper[4919]: E0930 20:30:07.671899 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" Sep 30 20:30:13 crc kubenswrapper[4919]: E0930 20:30:13.585690 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Sep 30 20:30:13 crc kubenswrapper[4919]: E0930 20:30:13.586599 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' 
/tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lszll,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(567de3cf-1a4f-426d-b4d5-da78ead6e923): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:30:13 crc kubenswrapper[4919]: E0930 20:30:13.587883 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" Sep 30 20:30:13 crc kubenswrapper[4919]: E0930 20:30:13.717807 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.572278 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.572781 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n2f4f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-88rpf_openstack(8f73ed0f-186b-45f3-9776-ec31c69a5d56): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.576915 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.583268 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.583505 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-55zpk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-glwsx_openstack(a5ca12f5-90c7-48ad-a05a-8f03214d928f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.584720 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" podUID="a5ca12f5-90c7-48ad-a05a-8f03214d928f" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.597863 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.598070 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mfvcs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-zm2m4_openstack(f1309388-8cc1-45d3-9d03-da1049ea176f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.599316 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" podUID="f1309388-8cc1-45d3-9d03-da1049ea176f" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.613602 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.613780 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zwflc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-psgk6_openstack(b216f695-675e-4e29-9b4e-701b29fb8c3e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.614921 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" podUID="b216f695-675e-4e29-9b4e-701b29fb8c3e" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.730064 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" Sep 30 20:30:14 crc kubenswrapper[4919]: E0930 20:30:14.730137 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" podUID="f1309388-8cc1-45d3-9d03-da1049ea176f" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.052549 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.066348 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.283102 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.293096 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.382799 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55zpk\" (UniqueName: \"kubernetes.io/projected/a5ca12f5-90c7-48ad-a05a-8f03214d928f-kube-api-access-55zpk\") pod \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.382891 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-config\") pod \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.382939 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-dns-svc\") pod \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\" (UID: \"a5ca12f5-90c7-48ad-a05a-8f03214d928f\") " Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.382988 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwflc\" (UniqueName: \"kubernetes.io/projected/b216f695-675e-4e29-9b4e-701b29fb8c3e-kube-api-access-zwflc\") pod \"b216f695-675e-4e29-9b4e-701b29fb8c3e\" (UID: \"b216f695-675e-4e29-9b4e-701b29fb8c3e\") " Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.383094 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b216f695-675e-4e29-9b4e-701b29fb8c3e-config\") pod \"b216f695-675e-4e29-9b4e-701b29fb8c3e\" (UID: \"b216f695-675e-4e29-9b4e-701b29fb8c3e\") " Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.383537 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-config" (OuterVolumeSpecName: "config") pod "a5ca12f5-90c7-48ad-a05a-8f03214d928f" (UID: "a5ca12f5-90c7-48ad-a05a-8f03214d928f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.383809 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b216f695-675e-4e29-9b4e-701b29fb8c3e-config" (OuterVolumeSpecName: "config") pod "b216f695-675e-4e29-9b4e-701b29fb8c3e" (UID: "b216f695-675e-4e29-9b4e-701b29fb8c3e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.383821 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a5ca12f5-90c7-48ad-a05a-8f03214d928f" (UID: "a5ca12f5-90c7-48ad-a05a-8f03214d928f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.398856 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b216f695-675e-4e29-9b4e-701b29fb8c3e-kube-api-access-zwflc" (OuterVolumeSpecName: "kube-api-access-zwflc") pod "b216f695-675e-4e29-9b4e-701b29fb8c3e" (UID: "b216f695-675e-4e29-9b4e-701b29fb8c3e"). InnerVolumeSpecName "kube-api-access-zwflc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.398933 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5ca12f5-90c7-48ad-a05a-8f03214d928f-kube-api-access-55zpk" (OuterVolumeSpecName: "kube-api-access-55zpk") pod "a5ca12f5-90c7-48ad-a05a-8f03214d928f" (UID: "a5ca12f5-90c7-48ad-a05a-8f03214d928f"). InnerVolumeSpecName "kube-api-access-55zpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.431953 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:30:15 crc kubenswrapper[4919]: W0930 20:30:15.434054 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podccd8b2d8_28c3_42a2_a3d9_e4fb724eea17.slice/crio-94dfbd533835ec6a4225e56b6677aebaa13983eeb98ba4086daab5c7c0ace1d8 WatchSource:0}: Error finding container 94dfbd533835ec6a4225e56b6677aebaa13983eeb98ba4086daab5c7c0ace1d8: Status 404 returned error can't find the container with id 94dfbd533835ec6a4225e56b6677aebaa13983eeb98ba4086daab5c7c0ace1d8 Sep 30 20:30:15 crc kubenswrapper[4919]: W0930 20:30:15.443337 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf35330fc_f5b9_461f_801e_9ae42bd20866.slice/crio-3fd7a11f5f061b99ca61fc60d09174d471ee2475e9e8660b340fbdce2ca02bf5 WatchSource:0}: Error finding container 3fd7a11f5f061b99ca61fc60d09174d471ee2475e9e8660b340fbdce2ca02bf5: Status 404 returned error can't find the container with id 3fd7a11f5f061b99ca61fc60d09174d471ee2475e9e8660b340fbdce2ca02bf5 Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.446486 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h"] Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.454177 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.468610 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jx7tr"] Sep 30 20:30:15 crc kubenswrapper[4919]: W0930 20:30:15.473549 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b4d1dc0_4d24_4128_a83b_9f37e7356309.slice/crio-5998ac74652cdaa8a8ca2db325186d134d7a55bf0ad75615ebcaac2c2eb09cc6 WatchSource:0}: Error finding container 5998ac74652cdaa8a8ca2db325186d134d7a55bf0ad75615ebcaac2c2eb09cc6: Status 404 returned error can't find the container with id 5998ac74652cdaa8a8ca2db325186d134d7a55bf0ad75615ebcaac2c2eb09cc6 Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.486056 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.486079 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a5ca12f5-90c7-48ad-a05a-8f03214d928f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.486089 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwflc\" (UniqueName: 
\"kubernetes.io/projected/b216f695-675e-4e29-9b4e-701b29fb8c3e-kube-api-access-zwflc\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.486098 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b216f695-675e-4e29-9b4e-701b29fb8c3e-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.486108 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55zpk\" (UniqueName: \"kubernetes.io/projected/a5ca12f5-90c7-48ad-a05a-8f03214d928f-kube-api-access-55zpk\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.565980 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 30 20:30:15 crc kubenswrapper[4919]: W0930 20:30:15.570515 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87cf8edf_c133_4a62_939f_72dc079db17e.slice/crio-773ec6c98c152375a457ad5e685aed13e705c2f1a691c47e7722f6eb32e43b46 WatchSource:0}: Error finding container 773ec6c98c152375a457ad5e685aed13e705c2f1a691c47e7722f6eb32e43b46: Status 404 returned error can't find the container with id 773ec6c98c152375a457ad5e685aed13e705c2f1a691c47e7722f6eb32e43b46 Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.745745 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87cf8edf-c133-4a62-939f-72dc079db17e","Type":"ContainerStarted","Data":"773ec6c98c152375a457ad5e685aed13e705c2f1a691c47e7722f6eb32e43b46"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.747973 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"d5e405b0-b5a3-4313-8fd2-b592b38e5926","Type":"ContainerStarted","Data":"370e88248de26040d45ad8e6682454db70f47ec0edfe0e0eddffb3f23b007eff"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.754698 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" event={"ID":"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17","Type":"ContainerStarted","Data":"962aee403f133f2b855f9a6b12388ca94ca531c4c1a1804ae120ab593eb3c3b7"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.754752 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" event={"ID":"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17","Type":"ContainerStarted","Data":"94dfbd533835ec6a4225e56b6677aebaa13983eeb98ba4086daab5c7c0ace1d8"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.762468 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f35330fc-f5b9-461f-801e-9ae42bd20866","Type":"ContainerStarted","Data":"3fd7a11f5f061b99ca61fc60d09174d471ee2475e9e8660b340fbdce2ca02bf5"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.773510 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jx7tr" event={"ID":"8b4d1dc0-4d24-4128-a83b-9f37e7356309","Type":"ContainerStarted","Data":"5998ac74652cdaa8a8ca2db325186d134d7a55bf0ad75615ebcaac2c2eb09cc6"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.778252 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" 
event={"ID":"a5ca12f5-90c7-48ad-a05a-8f03214d928f","Type":"ContainerDied","Data":"05740f74455be57a3c7ac7af10a447ed0b13639bac02e8d48d295df7407bd450"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.778349 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-glwsx" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.785643 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"f5483de2-8939-4696-969b-efa0a56de229","Type":"ContainerStarted","Data":"4d996596d540b6da5c2e14b58649f0ffadf9aa330588c797d5346ef8153e0013"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.787462 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3643ae76-bfa6-4d35-94ad-fedfa85b1977","Type":"ContainerStarted","Data":"070181727a325421ea86c8fa88a344d157590d43b7ae3db06115ba269f9e32dd"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.788856 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" event={"ID":"b216f695-675e-4e29-9b4e-701b29fb8c3e","Type":"ContainerDied","Data":"6610bdc81341b87afe8f2c4c8d47b32929df320aa03967264935dc8e0ae9ea83"} Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.788969 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-psgk6" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.795929 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" podStartSLOduration=15.795915586 podStartE2EDuration="15.795915586s" podCreationTimestamp="2025-09-30 20:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:30:15.792962581 +0000 UTC m=+1000.908995718" watchObservedRunningTime="2025-09-30 20:30:15.795915586 +0000 UTC m=+1000.911948713" Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.873989 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-glwsx"] Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.883280 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-glwsx"] Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.899264 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-psgk6"] Sep 30 20:30:15 crc kubenswrapper[4919]: I0930 20:30:15.906463 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-psgk6"] Sep 30 20:30:16 crc kubenswrapper[4919]: I0930 20:30:16.278453 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 30 20:30:16 crc kubenswrapper[4919]: I0930 20:30:16.383303 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ppjcf"] Sep 30 20:30:16 crc kubenswrapper[4919]: W0930 20:30:16.695730 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52b6421a_9e6a_490d_9940_b2931f34aae1.slice/crio-af7f926fe2c6c306293bebfbb334dbb2b0c8f58c26cb9aa12e0053bbe6a3d7fd WatchSource:0}: Error finding container af7f926fe2c6c306293bebfbb334dbb2b0c8f58c26cb9aa12e0053bbe6a3d7fd: Status 404 returned error can't find the container with id af7f926fe2c6c306293bebfbb334dbb2b0c8f58c26cb9aa12e0053bbe6a3d7fd 
Sep 30 20:30:16 crc kubenswrapper[4919]: I0930 20:30:16.803033 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ppjcf" event={"ID":"52b6421a-9e6a-490d-9940-b2931f34aae1","Type":"ContainerStarted","Data":"af7f926fe2c6c306293bebfbb334dbb2b0c8f58c26cb9aa12e0053bbe6a3d7fd"} Sep 30 20:30:16 crc kubenswrapper[4919]: I0930 20:30:16.804492 4919 generic.go:334] "Generic (PLEG): container finished" podID="ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17" containerID="962aee403f133f2b855f9a6b12388ca94ca531c4c1a1804ae120ab593eb3c3b7" exitCode=0 Sep 30 20:30:16 crc kubenswrapper[4919]: I0930 20:30:16.804531 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" event={"ID":"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17","Type":"ContainerDied","Data":"962aee403f133f2b855f9a6b12388ca94ca531c4c1a1804ae120ab593eb3c3b7"} Sep 30 20:30:17 crc kubenswrapper[4919]: W0930 20:30:17.456357 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b354ea4_a8cd_4c32_aa58_4e5e5d56a1e3.slice/crio-134e2272a9338c68670c1090e21e245b7463665a41f174d040001591872363e9 WatchSource:0}: Error finding container 134e2272a9338c68670c1090e21e245b7463665a41f174d040001591872363e9: Status 404 returned error can't find the container with id 134e2272a9338c68670c1090e21e245b7463665a41f174d040001591872363e9 Sep 30 20:30:17 crc kubenswrapper[4919]: I0930 20:30:17.641823 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5ca12f5-90c7-48ad-a05a-8f03214d928f" path="/var/lib/kubelet/pods/a5ca12f5-90c7-48ad-a05a-8f03214d928f/volumes" Sep 30 20:30:17 crc kubenswrapper[4919]: I0930 20:30:17.642180 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b216f695-675e-4e29-9b4e-701b29fb8c3e" path="/var/lib/kubelet/pods/b216f695-675e-4e29-9b4e-701b29fb8c3e/volumes" Sep 30 20:30:17 crc kubenswrapper[4919]: I0930 20:30:17.814203 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3","Type":"ContainerStarted","Data":"134e2272a9338c68670c1090e21e245b7463665a41f174d040001591872363e9"} Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.589377 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.650389 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-secret-volume\") pod \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.650683 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-config-volume\") pod \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.650803 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4q9v\" (UniqueName: \"kubernetes.io/projected/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-kube-api-access-g4q9v\") pod \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\" (UID: \"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17\") " Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.652370 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-config-volume" (OuterVolumeSpecName: "config-volume") pod "ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17" (UID: "ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.657150 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17" (UID: "ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.657617 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-kube-api-access-g4q9v" (OuterVolumeSpecName: "kube-api-access-g4q9v") pod "ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17" (UID: "ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17"). InnerVolumeSpecName "kube-api-access-g4q9v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.752980 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4q9v\" (UniqueName: \"kubernetes.io/projected/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-kube-api-access-g4q9v\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.753013 4919 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.753027 4919 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.839977 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" event={"ID":"ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17","Type":"ContainerDied","Data":"94dfbd533835ec6a4225e56b6677aebaa13983eeb98ba4086daab5c7c0ace1d8"} Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.840019 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94dfbd533835ec6a4225e56b6677aebaa13983eeb98ba4086daab5c7c0ace1d8" Sep 30 20:30:19 crc kubenswrapper[4919]: I0930 20:30:19.840082 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321070-lz75h" Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 20:30:23.886793 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87cf8edf-c133-4a62-939f-72dc079db17e","Type":"ContainerStarted","Data":"2036df0388e6b810d80203596e1954f4336f80bf1b8d179ea317660840066b5a"} Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 20:30:23.897959 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"d5e405b0-b5a3-4313-8fd2-b592b38e5926","Type":"ContainerStarted","Data":"b87909e2f159fd43f5bbdc55da5ffe40395a2424e960183740005f6756545d31"} Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 20:30:23.904790 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"f5483de2-8939-4696-969b-efa0a56de229","Type":"ContainerStarted","Data":"82bd310d7a55eebfcca22928a1892d001fb7617b7c0dfb8df88a88ae705a3395"} Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 20:30:23.905440 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 20:30:23.912899 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f35330fc-f5b9-461f-801e-9ae42bd20866","Type":"ContainerStarted","Data":"617d776c7ff2ff8b4306ee795c7b63cac800340bca74410ccf809a184d248e3e"} Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 20:30:23.916117 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3","Type":"ContainerStarted","Data":"1dbad7dd4e53f95ec6f845a07316d5a368ee2b24627068b51da64af0cba70d2a"} Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 20:30:23.929033 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 
20:30:23.970348 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=23.100581981 podStartE2EDuration="28.970324425s" podCreationTimestamp="2025-09-30 20:29:55 +0000 UTC" firstStartedPulling="2025-09-30 20:30:15.058260159 +0000 UTC m=+1000.174293286" lastFinishedPulling="2025-09-30 20:30:20.928002613 +0000 UTC m=+1006.044035730" observedRunningTime="2025-09-30 20:30:23.964912038 +0000 UTC m=+1009.080945165" watchObservedRunningTime="2025-09-30 20:30:23.970324425 +0000 UTC m=+1009.086357552" Sep 30 20:30:23 crc kubenswrapper[4919]: I0930 20:30:23.982047 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=19.283186653 podStartE2EDuration="26.982025963s" podCreationTimestamp="2025-09-30 20:29:57 +0000 UTC" firstStartedPulling="2025-09-30 20:30:15.428303583 +0000 UTC m=+1000.544336720" lastFinishedPulling="2025-09-30 20:30:23.127142903 +0000 UTC m=+1008.243176030" observedRunningTime="2025-09-30 20:30:23.981335503 +0000 UTC m=+1009.097368640" watchObservedRunningTime="2025-09-30 20:30:23.982025963 +0000 UTC m=+1009.098059110" Sep 30 20:30:24 crc kubenswrapper[4919]: I0930 20:30:24.939632 4919 generic.go:334] "Generic (PLEG): container finished" podID="52b6421a-9e6a-490d-9940-b2931f34aae1" containerID="4c26a30b2e2152cf1941e5da6fb3047769f98b90f55e534b9c4c5454f8e1f3a4" exitCode=0 Sep 30 20:30:24 crc kubenswrapper[4919]: I0930 20:30:24.939836 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ppjcf" event={"ID":"52b6421a-9e6a-490d-9940-b2931f34aae1","Type":"ContainerDied","Data":"4c26a30b2e2152cf1941e5da6fb3047769f98b90f55e534b9c4c5454f8e1f3a4"} Sep 30 20:30:24 crc kubenswrapper[4919]: I0930 20:30:24.944470 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3643ae76-bfa6-4d35-94ad-fedfa85b1977","Type":"ContainerStarted","Data":"e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973"} Sep 30 20:30:24 crc kubenswrapper[4919]: I0930 20:30:24.946263 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jx7tr" event={"ID":"8b4d1dc0-4d24-4128-a83b-9f37e7356309","Type":"ContainerStarted","Data":"3a6960a650269edacb7878d1b98e54c0ff14cf1cff7d9b12101c271813d53d42"} Sep 30 20:30:24 crc kubenswrapper[4919]: I0930 20:30:24.947019 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-jx7tr" Sep 30 20:30:24 crc kubenswrapper[4919]: I0930 20:30:24.949459 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"831f0cec-e526-41e4-851f-139ffef9bea5","Type":"ContainerStarted","Data":"93e7c638bdaeea46130f5231c2e87f66af1eac382e728abedd402b1d49dd981f"} Sep 30 20:30:25 crc kubenswrapper[4919]: I0930 20:30:25.007091 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jx7tr" podStartSLOduration=17.498420917 podStartE2EDuration="25.007071491s" podCreationTimestamp="2025-09-30 20:30:00 +0000 UTC" firstStartedPulling="2025-09-30 20:30:15.475817429 +0000 UTC m=+1000.591850556" lastFinishedPulling="2025-09-30 20:30:22.984468003 +0000 UTC m=+1008.100501130" observedRunningTime="2025-09-30 20:30:25.001563152 +0000 UTC m=+1010.117596289" watchObservedRunningTime="2025-09-30 20:30:25.007071491 +0000 UTC m=+1010.123104638" Sep 30 20:30:25 crc kubenswrapper[4919]: I0930 20:30:25.960534 4919 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ovn-controller-ovs-ppjcf" event={"ID":"52b6421a-9e6a-490d-9940-b2931f34aae1","Type":"ContainerStarted","Data":"a062c34684c6a1d0c97eeef16700d72c71d15c6902d78f1a912c0a6342e5f15f"} Sep 30 20:30:26 crc kubenswrapper[4919]: I0930 20:30:26.969491 4919 generic.go:334] "Generic (PLEG): container finished" podID="d5e405b0-b5a3-4313-8fd2-b592b38e5926" containerID="b87909e2f159fd43f5bbdc55da5ffe40395a2424e960183740005f6756545d31" exitCode=0 Sep 30 20:30:26 crc kubenswrapper[4919]: I0930 20:30:26.969569 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"d5e405b0-b5a3-4313-8fd2-b592b38e5926","Type":"ContainerDied","Data":"b87909e2f159fd43f5bbdc55da5ffe40395a2424e960183740005f6756545d31"} Sep 30 20:30:26 crc kubenswrapper[4919]: I0930 20:30:26.973113 4919 generic.go:334] "Generic (PLEG): container finished" podID="f35330fc-f5b9-461f-801e-9ae42bd20866" containerID="617d776c7ff2ff8b4306ee795c7b63cac800340bca74410ccf809a184d248e3e" exitCode=0 Sep 30 20:30:26 crc kubenswrapper[4919]: I0930 20:30:26.973583 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f35330fc-f5b9-461f-801e-9ae42bd20866","Type":"ContainerDied","Data":"617d776c7ff2ff8b4306ee795c7b63cac800340bca74410ccf809a184d248e3e"} Sep 30 20:30:27 crc kubenswrapper[4919]: I0930 20:30:27.988079 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"d5e405b0-b5a3-4313-8fd2-b592b38e5926","Type":"ContainerStarted","Data":"ff6c4667141e956889c08c31b6072b4579aad1219eea37537a29422de4d14b4a"} Sep 30 20:30:27 crc kubenswrapper[4919]: I0930 20:30:27.992028 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"f35330fc-f5b9-461f-801e-9ae42bd20866","Type":"ContainerStarted","Data":"8d1f4729bf03395373cb145576a17ee240800819fa188c6e37539922768dd948"} Sep 30 20:30:27 crc kubenswrapper[4919]: I0930 20:30:27.994521 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3","Type":"ContainerStarted","Data":"a902fbc1220855d33c61cc02534a81abd0184db77cf6a364864740530c3657df"} Sep 30 20:30:27 crc kubenswrapper[4919]: I0930 20:30:27.996884 4919 generic.go:334] "Generic (PLEG): container finished" podID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" containerID="c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf" exitCode=0 Sep 30 20:30:27 crc kubenswrapper[4919]: I0930 20:30:27.996989 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" event={"ID":"8f73ed0f-186b-45f3-9776-ec31c69a5d56","Type":"ContainerDied","Data":"c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf"} Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.002855 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"87cf8edf-c133-4a62-939f-72dc079db17e","Type":"ContainerStarted","Data":"3b53df8d3da276983d8b5b54e6cc8a57236118fd87387c5222541431b99af655"} Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.013672 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ppjcf" event={"ID":"52b6421a-9e6a-490d-9940-b2931f34aae1","Type":"ContainerStarted","Data":"41c3edd34395158bafbdfb8691adf5e403f337fc306fa4292b7f94ef8f2a733f"} Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.014097 4919 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.014376 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.041674 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=26.820337065 podStartE2EDuration="34.041643789s" podCreationTimestamp="2025-09-30 20:29:54 +0000 UTC" firstStartedPulling="2025-09-30 20:30:15.062975166 +0000 UTC m=+1000.179008283" lastFinishedPulling="2025-09-30 20:30:22.28428184 +0000 UTC m=+1007.400315007" observedRunningTime="2025-09-30 20:30:28.024766481 +0000 UTC m=+1013.140799658" watchObservedRunningTime="2025-09-30 20:30:28.041643789 +0000 UTC m=+1013.157676956" Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.093781 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=28.884432343 podStartE2EDuration="35.093757488s" podCreationTimestamp="2025-09-30 20:29:53 +0000 UTC" firstStartedPulling="2025-09-30 20:30:15.445405318 +0000 UTC m=+1000.561438465" lastFinishedPulling="2025-09-30 20:30:21.654730483 +0000 UTC m=+1006.770763610" observedRunningTime="2025-09-30 20:30:28.087678052 +0000 UTC m=+1013.203711219" watchObservedRunningTime="2025-09-30 20:30:28.093757488 +0000 UTC m=+1013.209790635" Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.100338 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-ppjcf" podStartSLOduration=22.51433813 podStartE2EDuration="28.100317348s" podCreationTimestamp="2025-09-30 20:30:00 +0000 UTC" firstStartedPulling="2025-09-30 20:30:16.698326703 +0000 UTC m=+1001.814359830" lastFinishedPulling="2025-09-30 20:30:22.284305911 +0000 UTC m=+1007.400339048" observedRunningTime="2025-09-30 20:30:28.064681556 +0000 UTC m=+1013.180714703" watchObservedRunningTime="2025-09-30 20:30:28.100317348 +0000 UTC m=+1013.216350485" Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.123735 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=16.829761009 podStartE2EDuration="28.123708505s" podCreationTimestamp="2025-09-30 20:30:00 +0000 UTC" firstStartedPulling="2025-09-30 20:30:15.572982772 +0000 UTC m=+1000.689015899" lastFinishedPulling="2025-09-30 20:30:26.866930268 +0000 UTC m=+1011.982963395" observedRunningTime="2025-09-30 20:30:28.120275196 +0000 UTC m=+1013.236308363" watchObservedRunningTime="2025-09-30 20:30:28.123708505 +0000 UTC m=+1013.239741652" Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.167782 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=14.784910236 podStartE2EDuration="24.167764961s" podCreationTimestamp="2025-09-30 20:30:04 +0000 UTC" firstStartedPulling="2025-09-30 20:30:17.458591885 +0000 UTC m=+1002.574625012" lastFinishedPulling="2025-09-30 20:30:26.84144661 +0000 UTC m=+1011.957479737" observedRunningTime="2025-09-30 20:30:28.165095034 +0000 UTC m=+1013.281128201" watchObservedRunningTime="2025-09-30 20:30:28.167764961 +0000 UTC m=+1013.283798088" Sep 30 20:30:28 crc kubenswrapper[4919]: I0930 20:30:28.683257 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:28 crc kubenswrapper[4919]: 
I0930 20:30:28.761400 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.037608 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" event={"ID":"8f73ed0f-186b-45f3-9776-ec31c69a5d56","Type":"ContainerStarted","Data":"c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6"} Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.038048 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.042750 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"567de3cf-1a4f-426d-b4d5-da78ead6e923","Type":"ContainerStarted","Data":"2a760f9f87e08a592631ecf86976cc2522b7c1236f4dead9c79de8addb7bc69a"} Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.043113 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.072911 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" podStartSLOduration=3.079599385 podStartE2EDuration="38.072885617s" podCreationTimestamp="2025-09-30 20:29:51 +0000 UTC" firstStartedPulling="2025-09-30 20:29:52.108909108 +0000 UTC m=+977.224942235" lastFinishedPulling="2025-09-30 20:30:27.10219534 +0000 UTC m=+1012.218228467" observedRunningTime="2025-09-30 20:30:29.066592694 +0000 UTC m=+1014.182625881" watchObservedRunningTime="2025-09-30 20:30:29.072885617 +0000 UTC m=+1014.188918784" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.094373 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.392238 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm2m4"] Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.435098 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-x6xnp"] Sep 30 20:30:29 crc kubenswrapper[4919]: E0930 20:30:29.435516 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17" containerName="collect-profiles" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.435527 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17" containerName="collect-profiles" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.435670 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="ccd8b2d8-28c3-42a2-a3d9-e4fb724eea17" containerName="collect-profiles" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.436533 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.439282 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.462825 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-77ggv"] Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.464106 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.469002 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.478562 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-x6xnp"] Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.487252 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-77ggv"] Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.526406 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5d9h\" (UniqueName: \"kubernetes.io/projected/afedbc98-c0b8-4924-958c-4069e94a167f-kube-api-access-p5d9h\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.526484 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.526551 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-config\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.526584 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.628434 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-config\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.628729 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.628779 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5d9h\" (UniqueName: \"kubernetes.io/projected/afedbc98-c0b8-4924-958c-4069e94a167f-kube-api-access-p5d9h\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.628809 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-pgsd4\" (UniqueName: \"kubernetes.io/projected/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-kube-api-access-pgsd4\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.628848 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-ovs-rundir\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.628933 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.628957 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-ovn-rundir\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.628995 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-combined-ca-bundle\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.629032 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.629093 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-config\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.629793 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.629980 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-config\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.630324 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.654572 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5d9h\" (UniqueName: \"kubernetes.io/projected/afedbc98-c0b8-4924-958c-4069e94a167f-kube-api-access-p5d9h\") pod \"dnsmasq-dns-7fd796d7df-x6xnp\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.719610 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.724471 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-88rpf"] Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.753186 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgsd4\" (UniqueName: \"kubernetes.io/projected/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-kube-api-access-pgsd4\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.753284 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-ovs-rundir\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.753332 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-ovn-rundir\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.753374 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-combined-ca-bundle\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.753421 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.753510 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-config\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.754363 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-config\") pod \"ovn-controller-metrics-77ggv\" 
(UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.755317 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-ovs-rundir\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.755394 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-ovn-rundir\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.761696 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-combined-ca-bundle\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.763772 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.765168 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-lkb6x"] Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.766377 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.768469 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.769032 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.777695 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgsd4\" (UniqueName: \"kubernetes.io/projected/a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea-kube-api-access-pgsd4\") pod \"ovn-controller-metrics-77ggv\" (UID: \"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea\") " pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.789923 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-77ggv" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.800764 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.839371 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-lkb6x"] Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.868868 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.956761 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-config\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.957166 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.957289 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.957497 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5hqw\" (UniqueName: \"kubernetes.io/projected/fedb355b-baae-4ce0-b68b-041578c10496-kube-api-access-c5hqw\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:29 crc kubenswrapper[4919]: I0930 20:30:29.957688 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.052470 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.052540 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-zm2m4" event={"ID":"f1309388-8cc1-45d3-9d03-da1049ea176f","Type":"ContainerDied","Data":"ab1c1e5e28dee287aa03df2ea83a9f08ac147834954116b11bb71e0c129bfe7e"} Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.053732 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.061879 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f1309388-8cc1-45d3-9d03-da1049ea176f" (UID: "f1309388-8cc1-45d3-9d03-da1049ea176f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.062006 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-dns-svc\") pod \"f1309388-8cc1-45d3-9d03-da1049ea176f\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.062127 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-config\") pod \"f1309388-8cc1-45d3-9d03-da1049ea176f\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.062575 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-config" (OuterVolumeSpecName: "config") pod "f1309388-8cc1-45d3-9d03-da1049ea176f" (UID: "f1309388-8cc1-45d3-9d03-da1049ea176f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.063344 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfvcs\" (UniqueName: \"kubernetes.io/projected/f1309388-8cc1-45d3-9d03-da1049ea176f-kube-api-access-mfvcs\") pod \"f1309388-8cc1-45d3-9d03-da1049ea176f\" (UID: \"f1309388-8cc1-45d3-9d03-da1049ea176f\") " Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.063728 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-config\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.063766 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.063945 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.064000 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5hqw\" (UniqueName: \"kubernetes.io/projected/fedb355b-baae-4ce0-b68b-041578c10496-kube-api-access-c5hqw\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.064101 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.064384 4919 reconciler_common.go:293] "Volume 
detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.064411 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f1309388-8cc1-45d3-9d03-da1049ea176f-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.064819 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.065014 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.065122 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.066191 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-config\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.072352 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1309388-8cc1-45d3-9d03-da1049ea176f-kube-api-access-mfvcs" (OuterVolumeSpecName: "kube-api-access-mfvcs") pod "f1309388-8cc1-45d3-9d03-da1049ea176f" (UID: "f1309388-8cc1-45d3-9d03-da1049ea176f"). InnerVolumeSpecName "kube-api-access-mfvcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.085316 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5hqw\" (UniqueName: \"kubernetes.io/projected/fedb355b-baae-4ce0-b68b-041578c10496-kube-api-access-c5hqw\") pod \"dnsmasq-dns-86db49b7ff-lkb6x\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.094615 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.165921 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfvcs\" (UniqueName: \"kubernetes.io/projected/f1309388-8cc1-45d3-9d03-da1049ea176f-kube-api-access-mfvcs\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.178769 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.237568 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.238928 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.245556 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.246337 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.246591 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.247273 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-6bxxx" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.248347 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.256340 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-x6xnp"] Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.341835 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-77ggv"] Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.371366 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa074105-b5b2-44de-b6f1-5c62086574e4-scripts\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.371444 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc6d8\" (UniqueName: \"kubernetes.io/projected/fa074105-b5b2-44de-b6f1-5c62086574e4-kube-api-access-sc6d8\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.371470 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa074105-b5b2-44de-b6f1-5c62086574e4-config\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.371498 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.371548 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.371575 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fa074105-b5b2-44de-b6f1-5c62086574e4-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.371600 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.458256 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm2m4"] Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.464013 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-zm2m4"] Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.472581 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.472630 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fa074105-b5b2-44de-b6f1-5c62086574e4-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.472665 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.472711 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa074105-b5b2-44de-b6f1-5c62086574e4-scripts\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.472767 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc6d8\" (UniqueName: \"kubernetes.io/projected/fa074105-b5b2-44de-b6f1-5c62086574e4-kube-api-access-sc6d8\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.472794 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa074105-b5b2-44de-b6f1-5c62086574e4-config\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.472830 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.477943 4919 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.478345 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fa074105-b5b2-44de-b6f1-5c62086574e4-scripts\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.478580 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fa074105-b5b2-44de-b6f1-5c62086574e4-config\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.478705 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fa074105-b5b2-44de-b6f1-5c62086574e4-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.479907 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.496409 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa074105-b5b2-44de-b6f1-5c62086574e4-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.502297 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc6d8\" (UniqueName: \"kubernetes.io/projected/fa074105-b5b2-44de-b6f1-5c62086574e4-kube-api-access-sc6d8\") pod \"ovn-northd-0\" (UID: \"fa074105-b5b2-44de-b6f1-5c62086574e4\") " pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.573454 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.690400 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-lkb6x"] Sep 30 20:30:30 crc kubenswrapper[4919]: W0930 20:30:30.697415 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfedb355b_baae_4ce0_b68b_041578c10496.slice/crio-ed84059fe090d310171685c933038718606cc8ebccf2e37b558360799a25e653 WatchSource:0}: Error finding container ed84059fe090d310171685c933038718606cc8ebccf2e37b558360799a25e653: Status 404 returned error can't find the container with id ed84059fe090d310171685c933038718606cc8ebccf2e37b558360799a25e653 Sep 30 20:30:30 crc kubenswrapper[4919]: I0930 20:30:30.923482 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.074027 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-77ggv" event={"ID":"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea","Type":"ContainerStarted","Data":"5fe1428d5533c4e7bdd5d8003fd8d71a2b499e6ffe4777652c35f174470266d6"} Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.074081 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-77ggv" event={"ID":"a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea","Type":"ContainerStarted","Data":"4b244351136cdb81901e437cb0cd07cd6da1c7f0d9fa8a7a8213fdafe764d395"} Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.084640 4919 generic.go:334] "Generic (PLEG): container finished" podID="afedbc98-c0b8-4924-958c-4069e94a167f" containerID="37c23bc6806ee2eaf7d292ecbad216010e8b3f5673ea2ccf34446928c1761456" exitCode=0 Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.084740 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" event={"ID":"afedbc98-c0b8-4924-958c-4069e94a167f","Type":"ContainerDied","Data":"37c23bc6806ee2eaf7d292ecbad216010e8b3f5673ea2ccf34446928c1761456"} Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.084806 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" event={"ID":"afedbc98-c0b8-4924-958c-4069e94a167f","Type":"ContainerStarted","Data":"bf06a519a0bb99dc73a18f8e6f32439a5fbf0346e8fdd7b8a56896c7323ca818"} Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.091245 4919 generic.go:334] "Generic (PLEG): container finished" podID="fedb355b-baae-4ce0-b68b-041578c10496" containerID="b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d" exitCode=0 Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.091484 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" containerName="dnsmasq-dns" containerID="cri-o://c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6" gracePeriod=10 Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.092584 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" event={"ID":"fedb355b-baae-4ce0-b68b-041578c10496","Type":"ContainerDied","Data":"b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d"} Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.092625 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" 
event={"ID":"fedb355b-baae-4ce0-b68b-041578c10496","Type":"ContainerStarted","Data":"ed84059fe090d310171685c933038718606cc8ebccf2e37b558360799a25e653"} Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.093405 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.096574 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-77ggv" podStartSLOduration=2.096555086 podStartE2EDuration="2.096555086s" podCreationTimestamp="2025-09-30 20:30:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:30:31.089363528 +0000 UTC m=+1016.205396655" watchObservedRunningTime="2025-09-30 20:30:31.096555086 +0000 UTC m=+1016.212588223" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.454437 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.553759 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-config\") pod \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.554029 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2f4f\" (UniqueName: \"kubernetes.io/projected/8f73ed0f-186b-45f3-9776-ec31c69a5d56-kube-api-access-n2f4f\") pod \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.554068 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-dns-svc\") pod \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\" (UID: \"8f73ed0f-186b-45f3-9776-ec31c69a5d56\") " Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.558876 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f73ed0f-186b-45f3-9776-ec31c69a5d56-kube-api-access-n2f4f" (OuterVolumeSpecName: "kube-api-access-n2f4f") pod "8f73ed0f-186b-45f3-9776-ec31c69a5d56" (UID: "8f73ed0f-186b-45f3-9776-ec31c69a5d56"). InnerVolumeSpecName "kube-api-access-n2f4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.595141 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8f73ed0f-186b-45f3-9776-ec31c69a5d56" (UID: "8f73ed0f-186b-45f3-9776-ec31c69a5d56"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.629968 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-config" (OuterVolumeSpecName: "config") pod "8f73ed0f-186b-45f3-9776-ec31c69a5d56" (UID: "8f73ed0f-186b-45f3-9776-ec31c69a5d56"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.646261 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1309388-8cc1-45d3-9d03-da1049ea176f" path="/var/lib/kubelet/pods/f1309388-8cc1-45d3-9d03-da1049ea176f/volumes" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.655674 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.655707 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2f4f\" (UniqueName: \"kubernetes.io/projected/8f73ed0f-186b-45f3-9776-ec31c69a5d56-kube-api-access-n2f4f\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:31 crc kubenswrapper[4919]: I0930 20:30:31.655718 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8f73ed0f-186b-45f3-9776-ec31c69a5d56-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:32 crc kubenswrapper[4919]: E0930 20:30:32.057750 4919 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.75:48690->38.102.83.75:44333: write tcp 38.102.83.75:48690->38.102.83.75:44333: write: connection reset by peer Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.114488 4919 generic.go:334] "Generic (PLEG): container finished" podID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" containerID="c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6" exitCode=0 Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.114562 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.114582 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" event={"ID":"8f73ed0f-186b-45f3-9776-ec31c69a5d56","Type":"ContainerDied","Data":"c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6"} Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.114617 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-88rpf" event={"ID":"8f73ed0f-186b-45f3-9776-ec31c69a5d56","Type":"ContainerDied","Data":"a91862118572424a4f61d9ce677fe0eaaf9e63caa27e4e0127688885a4b02801"} Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.114636 4919 scope.go:117] "RemoveContainer" containerID="c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.117402 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" event={"ID":"fedb355b-baae-4ce0-b68b-041578c10496","Type":"ContainerStarted","Data":"8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1"} Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.117534 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.119684 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fa074105-b5b2-44de-b6f1-5c62086574e4","Type":"ContainerStarted","Data":"4f63961120df256269434417485d752771f9e443309071fccfd55e51022d91c5"} Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.121850 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" event={"ID":"afedbc98-c0b8-4924-958c-4069e94a167f","Type":"ContainerStarted","Data":"9a82526b8f5058a56a1e6f3b0e41df2ad3d4cf08a9d588df9f150ff8bbe71d72"} Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.122154 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.142446 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" podStartSLOduration=3.142427507 podStartE2EDuration="3.142427507s" podCreationTimestamp="2025-09-30 20:30:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:30:32.135555818 +0000 UTC m=+1017.251588965" watchObservedRunningTime="2025-09-30 20:30:32.142427507 +0000 UTC m=+1017.258460624" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.157551 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" podStartSLOduration=3.157534535 podStartE2EDuration="3.157534535s" podCreationTimestamp="2025-09-30 20:30:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:30:32.150009727 +0000 UTC m=+1017.266042854" watchObservedRunningTime="2025-09-30 20:30:32.157534535 +0000 UTC m=+1017.273567662" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.166065 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-88rpf"] Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.172358 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-88rpf"] Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.203839 4919 scope.go:117] "RemoveContainer" containerID="c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.224440 4919 scope.go:117] "RemoveContainer" containerID="c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6" Sep 30 20:30:32 crc kubenswrapper[4919]: E0930 20:30:32.228622 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6\": container with ID starting with c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6 not found: ID does not exist" containerID="c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.228655 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6"} err="failed to get container status \"c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6\": rpc error: code = NotFound desc = could not find container \"c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6\": container with ID starting with c00ed257601f30e9fa9e2b732a50000d5d8951892f3bf152a5c41d5e63d280f6 not found: ID does not exist" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.228676 4919 scope.go:117] "RemoveContainer" containerID="c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf" Sep 30 20:30:32 crc kubenswrapper[4919]: E0930 20:30:32.229094 4919 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf\": container with ID starting with c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf not found: ID does not exist" containerID="c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf" Sep 30 20:30:32 crc kubenswrapper[4919]: I0930 20:30:32.229145 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf"} err="failed to get container status \"c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf\": rpc error: code = NotFound desc = could not find container \"c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf\": container with ID starting with c20f44853d956fa35cffa197521abf101b13c853f95e248dea4d23c896e558cf not found: ID does not exist" Sep 30 20:30:33 crc kubenswrapper[4919]: I0930 20:30:33.134250 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fa074105-b5b2-44de-b6f1-5c62086574e4","Type":"ContainerStarted","Data":"a3a72aa30e1936936a22448a568fb30432f7d056a8b7345544ea21e431ffe8e1"} Sep 30 20:30:33 crc kubenswrapper[4919]: I0930 20:30:33.134637 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 30 20:30:33 crc kubenswrapper[4919]: I0930 20:30:33.134656 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"fa074105-b5b2-44de-b6f1-5c62086574e4","Type":"ContainerStarted","Data":"d957ea185375974efb4801ed445e37c4aba67f499834b3346c2a172bc8f575ae"} Sep 30 20:30:33 crc kubenswrapper[4919]: I0930 20:30:33.173370 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.0250673 podStartE2EDuration="3.173341345s" podCreationTimestamp="2025-09-30 20:30:30 +0000 UTC" firstStartedPulling="2025-09-30 20:30:31.102358855 +0000 UTC m=+1016.218391982" lastFinishedPulling="2025-09-30 20:30:32.2506329 +0000 UTC m=+1017.366666027" observedRunningTime="2025-09-30 20:30:33.160819303 +0000 UTC m=+1018.276852440" watchObservedRunningTime="2025-09-30 20:30:33.173341345 +0000 UTC m=+1018.289374512" Sep 30 20:30:33 crc kubenswrapper[4919]: I0930 20:30:33.676807 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" path="/var/lib/kubelet/pods/8f73ed0f-186b-45f3-9776-ec31c69a5d56/volumes" Sep 30 20:30:35 crc kubenswrapper[4919]: I0930 20:30:35.193364 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 30 20:30:35 crc kubenswrapper[4919]: I0930 20:30:35.193439 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 30 20:30:35 crc kubenswrapper[4919]: I0930 20:30:35.285500 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 30 20:30:35 crc kubenswrapper[4919]: I0930 20:30:35.507979 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 30 20:30:35 crc kubenswrapper[4919]: I0930 20:30:35.508099 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 30 20:30:36 crc kubenswrapper[4919]: I0930 20:30:36.088272 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/openstack-galera-0" Sep 30 20:30:36 crc kubenswrapper[4919]: I0930 20:30:36.250356 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 30 20:30:36 crc kubenswrapper[4919]: I0930 20:30:36.266423 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.791091 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.849327 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-x6xnp"] Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.849517 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" podUID="afedbc98-c0b8-4924-958c-4069e94a167f" containerName="dnsmasq-dns" containerID="cri-o://9a82526b8f5058a56a1e6f3b0e41df2ad3d4cf08a9d588df9f150ff8bbe71d72" gracePeriod=10 Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.851726 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.909453 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-v4hj5"] Sep 30 20:30:37 crc kubenswrapper[4919]: E0930 20:30:37.910041 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" containerName="init" Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.910059 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" containerName="init" Sep 30 20:30:37 crc kubenswrapper[4919]: E0930 20:30:37.910080 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" containerName="dnsmasq-dns" Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.910086 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" containerName="dnsmasq-dns" Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.910264 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f73ed0f-186b-45f3-9776-ec31c69a5d56" containerName="dnsmasq-dns" Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.911202 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:37 crc kubenswrapper[4919]: I0930 20:30:37.924590 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v4hj5"] Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.023328 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.023420 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.023451 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bvnt\" (UniqueName: \"kubernetes.io/projected/3f366a95-3a67-445a-9682-ec419dc21deb-kube-api-access-6bvnt\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.023469 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-config\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.023493 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-dns-svc\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.125653 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-dns-svc\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.125775 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.125868 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.125908 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-6bvnt\" (UniqueName: \"kubernetes.io/projected/3f366a95-3a67-445a-9682-ec419dc21deb-kube-api-access-6bvnt\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.125933 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-config\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.126826 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-dns-svc\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.126832 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.127255 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-config\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.127427 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.148705 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bvnt\" (UniqueName: \"kubernetes.io/projected/3f366a95-3a67-445a-9682-ec419dc21deb-kube-api-access-6bvnt\") pod \"dnsmasq-dns-698758b865-v4hj5\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") " pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.198176 4919 generic.go:334] "Generic (PLEG): container finished" podID="afedbc98-c0b8-4924-958c-4069e94a167f" containerID="9a82526b8f5058a56a1e6f3b0e41df2ad3d4cf08a9d588df9f150ff8bbe71d72" exitCode=0 Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.198426 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" event={"ID":"afedbc98-c0b8-4924-958c-4069e94a167f","Type":"ContainerDied","Data":"9a82526b8f5058a56a1e6f3b0e41df2ad3d4cf08a9d588df9f150ff8bbe71d72"} Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.256377 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.348964 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.430721 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-config\") pod \"afedbc98-c0b8-4924-958c-4069e94a167f\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.430787 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-ovsdbserver-nb\") pod \"afedbc98-c0b8-4924-958c-4069e94a167f\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.430831 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-dns-svc\") pod \"afedbc98-c0b8-4924-958c-4069e94a167f\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.430968 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5d9h\" (UniqueName: \"kubernetes.io/projected/afedbc98-c0b8-4924-958c-4069e94a167f-kube-api-access-p5d9h\") pod \"afedbc98-c0b8-4924-958c-4069e94a167f\" (UID: \"afedbc98-c0b8-4924-958c-4069e94a167f\") " Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.435900 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afedbc98-c0b8-4924-958c-4069e94a167f-kube-api-access-p5d9h" (OuterVolumeSpecName: "kube-api-access-p5d9h") pod "afedbc98-c0b8-4924-958c-4069e94a167f" (UID: "afedbc98-c0b8-4924-958c-4069e94a167f"). InnerVolumeSpecName "kube-api-access-p5d9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.466047 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "afedbc98-c0b8-4924-958c-4069e94a167f" (UID: "afedbc98-c0b8-4924-958c-4069e94a167f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.469409 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "afedbc98-c0b8-4924-958c-4069e94a167f" (UID: "afedbc98-c0b8-4924-958c-4069e94a167f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.474877 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-config" (OuterVolumeSpecName: "config") pod "afedbc98-c0b8-4924-958c-4069e94a167f" (UID: "afedbc98-c0b8-4924-958c-4069e94a167f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.533137 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5d9h\" (UniqueName: \"kubernetes.io/projected/afedbc98-c0b8-4924-958c-4069e94a167f-kube-api-access-p5d9h\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.533170 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.533182 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.533190 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afedbc98-c0b8-4924-958c-4069e94a167f-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.673074 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v4hj5"] Sep 30 20:30:38 crc kubenswrapper[4919]: W0930 20:30:38.678622 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f366a95_3a67_445a_9682_ec419dc21deb.slice/crio-24f63fdd75f1dbbfcfb9c663f7e17bee603fb1e7e46639dd35bf455c8d4ace2b WatchSource:0}: Error finding container 24f63fdd75f1dbbfcfb9c663f7e17bee603fb1e7e46639dd35bf455c8d4ace2b: Status 404 returned error can't find the container with id 24f63fdd75f1dbbfcfb9c663f7e17bee603fb1e7e46639dd35bf455c8d4ace2b Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.972794 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 30 20:30:38 crc kubenswrapper[4919]: E0930 20:30:38.973350 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afedbc98-c0b8-4924-958c-4069e94a167f" containerName="dnsmasq-dns" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.973373 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="afedbc98-c0b8-4924-958c-4069e94a167f" containerName="dnsmasq-dns" Sep 30 20:30:38 crc kubenswrapper[4919]: E0930 20:30:38.973391 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afedbc98-c0b8-4924-958c-4069e94a167f" containerName="init" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.973404 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="afedbc98-c0b8-4924-958c-4069e94a167f" containerName="init" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.973786 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="afedbc98-c0b8-4924-958c-4069e94a167f" containerName="dnsmasq-dns" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.984424 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.995346 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-8v2l9" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.995569 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.995624 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.995568 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 30 20:30:38 crc kubenswrapper[4919]: I0930 20:30:38.997324 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.141622 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.141688 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/30462126-2244-47cd-8076-12744196012d-lock\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.141727 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7kq2\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-kube-api-access-n7kq2\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.141881 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.141998 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/30462126-2244-47cd-8076-12744196012d-cache\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.209096 4919 generic.go:334] "Generic (PLEG): container finished" podID="3f366a95-3a67-445a-9682-ec419dc21deb" containerID="c3abd68d4514d515945d35438eb611d1dfd7314646f9adcbe57f46fb4a785ca8" exitCode=0 Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.209170 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v4hj5" event={"ID":"3f366a95-3a67-445a-9682-ec419dc21deb","Type":"ContainerDied","Data":"c3abd68d4514d515945d35438eb611d1dfd7314646f9adcbe57f46fb4a785ca8"} Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.209200 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v4hj5" 
event={"ID":"3f366a95-3a67-445a-9682-ec419dc21deb","Type":"ContainerStarted","Data":"24f63fdd75f1dbbfcfb9c663f7e17bee603fb1e7e46639dd35bf455c8d4ace2b"} Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.212361 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" event={"ID":"afedbc98-c0b8-4924-958c-4069e94a167f","Type":"ContainerDied","Data":"bf06a519a0bb99dc73a18f8e6f32439a5fbf0346e8fdd7b8a56896c7323ca818"} Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.212401 4919 scope.go:117] "RemoveContainer" containerID="9a82526b8f5058a56a1e6f3b0e41df2ad3d4cf08a9d588df9f150ff8bbe71d72" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.212432 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-x6xnp" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.243868 4919 scope.go:117] "RemoveContainer" containerID="37c23bc6806ee2eaf7d292ecbad216010e8b3f5673ea2ccf34446928c1761456" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.244006 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.244600 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/30462126-2244-47cd-8076-12744196012d-cache\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.244862 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.244910 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/30462126-2244-47cd-8076-12744196012d-lock\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.244946 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7kq2\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-kube-api-access-n7kq2\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: E0930 20:30:39.245188 4919 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 20:30:39 crc kubenswrapper[4919]: E0930 20:30:39.245261 4919 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 20:30:39 crc kubenswrapper[4919]: E0930 20:30:39.245328 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift podName:30462126-2244-47cd-8076-12744196012d nodeName:}" failed. No retries permitted until 2025-09-30 20:30:39.745302763 +0000 UTC m=+1024.861335930 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift") pod "swift-storage-0" (UID: "30462126-2244-47cd-8076-12744196012d") : configmap "swift-ring-files" not found Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.245617 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/30462126-2244-47cd-8076-12744196012d-cache\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.245818 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.246022 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/30462126-2244-47cd-8076-12744196012d-lock\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.270019 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7kq2\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-kube-api-access-n7kq2\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.289550 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.427540 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-x6xnp"] Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.432972 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-x6xnp"] Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.494394 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-h6ch2"] Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.495820 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.499744 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.500918 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.502260 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.508146 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-h6ch2"] Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.550709 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-dispersionconf\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.550758 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-etc-swift\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.550795 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-ring-data-devices\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.550933 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-combined-ca-bundle\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.551009 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-scripts\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.551115 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-swiftconf\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.551148 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjn2k\" (UniqueName: \"kubernetes.io/projected/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-kube-api-access-fjn2k\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 
20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.640176 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afedbc98-c0b8-4924-958c-4069e94a167f" path="/var/lib/kubelet/pods/afedbc98-c0b8-4924-958c-4069e94a167f/volumes" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.652334 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-dispersionconf\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.652520 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-etc-swift\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.652612 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-ring-data-devices\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.652702 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-combined-ca-bundle\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.652789 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-scripts\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.652897 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-etc-swift\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.652904 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-swiftconf\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.652957 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjn2k\" (UniqueName: \"kubernetes.io/projected/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-kube-api-access-fjn2k\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.653449 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-ring-data-devices\") pod 
\"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.653814 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-scripts\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.656066 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-swiftconf\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.656744 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-combined-ca-bundle\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.658095 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-dispersionconf\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.671044 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjn2k\" (UniqueName: \"kubernetes.io/projected/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-kube-api-access-fjn2k\") pod \"swift-ring-rebalance-h6ch2\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.754093 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:39 crc kubenswrapper[4919]: E0930 20:30:39.754277 4919 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 20:30:39 crc kubenswrapper[4919]: E0930 20:30:39.754293 4919 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 20:30:39 crc kubenswrapper[4919]: E0930 20:30:39.754340 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift podName:30462126-2244-47cd-8076-12744196012d nodeName:}" failed. No retries permitted until 2025-09-30 20:30:40.754324861 +0000 UTC m=+1025.870357988 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift") pod "swift-storage-0" (UID: "30462126-2244-47cd-8076-12744196012d") : configmap "swift-ring-files" not found Sep 30 20:30:39 crc kubenswrapper[4919]: I0930 20:30:39.808997 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:40 crc kubenswrapper[4919]: I0930 20:30:40.181393 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:40 crc kubenswrapper[4919]: I0930 20:30:40.230911 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v4hj5" event={"ID":"3f366a95-3a67-445a-9682-ec419dc21deb","Type":"ContainerStarted","Data":"5dfa852199e81247e0f8bb47782146020480d46d9987d54f3751d19074a32a84"} Sep 30 20:30:40 crc kubenswrapper[4919]: I0930 20:30:40.231443 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:40 crc kubenswrapper[4919]: I0930 20:30:40.267492 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-v4hj5" podStartSLOduration=3.2674747379999998 podStartE2EDuration="3.267474738s" podCreationTimestamp="2025-09-30 20:30:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:30:40.255743878 +0000 UTC m=+1025.371777015" watchObservedRunningTime="2025-09-30 20:30:40.267474738 +0000 UTC m=+1025.383507865" Sep 30 20:30:40 crc kubenswrapper[4919]: I0930 20:30:40.308989 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-h6ch2"] Sep 30 20:30:40 crc kubenswrapper[4919]: I0930 20:30:40.778243 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:40 crc kubenswrapper[4919]: E0930 20:30:40.778365 4919 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 20:30:40 crc kubenswrapper[4919]: E0930 20:30:40.778397 4919 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 20:30:40 crc kubenswrapper[4919]: E0930 20:30:40.778456 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift podName:30462126-2244-47cd-8076-12744196012d nodeName:}" failed. No retries permitted until 2025-09-30 20:30:42.778438142 +0000 UTC m=+1027.894471269 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift") pod "swift-storage-0" (UID: "30462126-2244-47cd-8076-12744196012d") : configmap "swift-ring-files" not found Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.170655 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-ztsl6"] Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.171830 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-ztsl6" Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.180647 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-ztsl6"] Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.239436 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-h6ch2" event={"ID":"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad","Type":"ContainerStarted","Data":"463908def5a34ab631c5a245d09ea9f068853101648e21d88bf7ccc6d93a7ee9"} Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.288344 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twwb8\" (UniqueName: \"kubernetes.io/projected/e6416142-17e9-4398-a65d-10dbcfc06411-kube-api-access-twwb8\") pod \"glance-db-create-ztsl6\" (UID: \"e6416142-17e9-4398-a65d-10dbcfc06411\") " pod="openstack/glance-db-create-ztsl6" Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.390112 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twwb8\" (UniqueName: \"kubernetes.io/projected/e6416142-17e9-4398-a65d-10dbcfc06411-kube-api-access-twwb8\") pod \"glance-db-create-ztsl6\" (UID: \"e6416142-17e9-4398-a65d-10dbcfc06411\") " pod="openstack/glance-db-create-ztsl6" Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.408955 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twwb8\" (UniqueName: \"kubernetes.io/projected/e6416142-17e9-4398-a65d-10dbcfc06411-kube-api-access-twwb8\") pod \"glance-db-create-ztsl6\" (UID: \"e6416142-17e9-4398-a65d-10dbcfc06411\") " pod="openstack/glance-db-create-ztsl6" Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.491859 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-ztsl6" Sep 30 20:30:41 crc kubenswrapper[4919]: I0930 20:30:41.807283 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-ztsl6"] Sep 30 20:30:41 crc kubenswrapper[4919]: W0930 20:30:41.809938 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6416142_17e9_4398_a65d_10dbcfc06411.slice/crio-d3e1d50fa60bd4bc18f22bea64a65fa82bbfcf3cf7b91e99f79d0ea5f96f4d5c WatchSource:0}: Error finding container d3e1d50fa60bd4bc18f22bea64a65fa82bbfcf3cf7b91e99f79d0ea5f96f4d5c: Status 404 returned error can't find the container with id d3e1d50fa60bd4bc18f22bea64a65fa82bbfcf3cf7b91e99f79d0ea5f96f4d5c Sep 30 20:30:42 crc kubenswrapper[4919]: I0930 20:30:42.248307 4919 generic.go:334] "Generic (PLEG): container finished" podID="e6416142-17e9-4398-a65d-10dbcfc06411" containerID="4130af02109cfee47a5aa40a8b2cda64efc03e92d7cc9e870ac57ad1811c299a" exitCode=0 Sep 30 20:30:42 crc kubenswrapper[4919]: I0930 20:30:42.248396 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ztsl6" event={"ID":"e6416142-17e9-4398-a65d-10dbcfc06411","Type":"ContainerDied","Data":"4130af02109cfee47a5aa40a8b2cda64efc03e92d7cc9e870ac57ad1811c299a"} Sep 30 20:30:42 crc kubenswrapper[4919]: I0930 20:30:42.248612 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ztsl6" event={"ID":"e6416142-17e9-4398-a65d-10dbcfc06411","Type":"ContainerStarted","Data":"d3e1d50fa60bd4bc18f22bea64a65fa82bbfcf3cf7b91e99f79d0ea5f96f4d5c"} Sep 30 20:30:42 crc kubenswrapper[4919]: I0930 20:30:42.840820 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:42 crc kubenswrapper[4919]: E0930 20:30:42.840992 4919 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 20:30:42 crc kubenswrapper[4919]: E0930 20:30:42.841026 4919 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 20:30:42 crc kubenswrapper[4919]: E0930 20:30:42.841089 4919 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift podName:30462126-2244-47cd-8076-12744196012d nodeName:}" failed. No retries permitted until 2025-09-30 20:30:46.841072089 +0000 UTC m=+1031.957105216 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift") pod "swift-storage-0" (UID: "30462126-2244-47cd-8076-12744196012d") : configmap "swift-ring-files" not found Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.011905 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-ztsl6" Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.061788 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twwb8\" (UniqueName: \"kubernetes.io/projected/e6416142-17e9-4398-a65d-10dbcfc06411-kube-api-access-twwb8\") pod \"e6416142-17e9-4398-a65d-10dbcfc06411\" (UID: \"e6416142-17e9-4398-a65d-10dbcfc06411\") " Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.081762 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6416142-17e9-4398-a65d-10dbcfc06411-kube-api-access-twwb8" (OuterVolumeSpecName: "kube-api-access-twwb8") pod "e6416142-17e9-4398-a65d-10dbcfc06411" (UID: "e6416142-17e9-4398-a65d-10dbcfc06411"). InnerVolumeSpecName "kube-api-access-twwb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.164277 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twwb8\" (UniqueName: \"kubernetes.io/projected/e6416142-17e9-4398-a65d-10dbcfc06411-kube-api-access-twwb8\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.266466 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-ztsl6" Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.266469 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ztsl6" event={"ID":"e6416142-17e9-4398-a65d-10dbcfc06411","Type":"ContainerDied","Data":"d3e1d50fa60bd4bc18f22bea64a65fa82bbfcf3cf7b91e99f79d0ea5f96f4d5c"} Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.266876 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3e1d50fa60bd4bc18f22bea64a65fa82bbfcf3cf7b91e99f79d0ea5f96f4d5c" Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.270076 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-h6ch2" event={"ID":"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad","Type":"ContainerStarted","Data":"be11470198a65ddb3151f2d05250f445fa65066cf6ac819b7033b147f752440b"} Sep 30 20:30:44 crc kubenswrapper[4919]: I0930 20:30:44.300032 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-h6ch2" podStartSLOduration=1.693422521 podStartE2EDuration="5.30000358s" podCreationTimestamp="2025-09-30 20:30:39 +0000 UTC" firstStartedPulling="2025-09-30 20:30:40.313132 +0000 UTC m=+1025.429165127" lastFinishedPulling="2025-09-30 20:30:43.919713059 +0000 UTC m=+1029.035746186" observedRunningTime="2025-09-30 20:30:44.293536673 +0000 UTC m=+1029.409569810" watchObservedRunningTime="2025-09-30 20:30:44.30000358 +0000 UTC m=+1029.416036727" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.520533 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-f2fsn"] Sep 30 20:30:45 crc kubenswrapper[4919]: E0930 20:30:45.526956 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6416142-17e9-4398-a65d-10dbcfc06411" containerName="mariadb-database-create" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.526986 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6416142-17e9-4398-a65d-10dbcfc06411" containerName="mariadb-database-create" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.527949 4919 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e6416142-17e9-4398-a65d-10dbcfc06411" containerName="mariadb-database-create" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.529061 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-f2fsn" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.529300 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-f2fsn"] Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.595304 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4ks7\" (UniqueName: \"kubernetes.io/projected/61966895-d5a4-4b29-8177-623b9f37ae45-kube-api-access-x4ks7\") pod \"keystone-db-create-f2fsn\" (UID: \"61966895-d5a4-4b29-8177-623b9f37ae45\") " pod="openstack/keystone-db-create-f2fsn" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.631041 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.697475 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4ks7\" (UniqueName: \"kubernetes.io/projected/61966895-d5a4-4b29-8177-623b9f37ae45-kube-api-access-x4ks7\") pod \"keystone-db-create-f2fsn\" (UID: \"61966895-d5a4-4b29-8177-623b9f37ae45\") " pod="openstack/keystone-db-create-f2fsn" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.732731 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4ks7\" (UniqueName: \"kubernetes.io/projected/61966895-d5a4-4b29-8177-623b9f37ae45-kube-api-access-x4ks7\") pod \"keystone-db-create-f2fsn\" (UID: \"61966895-d5a4-4b29-8177-623b9f37ae45\") " pod="openstack/keystone-db-create-f2fsn" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.807787 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-9lx6b"] Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.808890 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9lx6b" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.824572 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9lx6b"] Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.862139 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-f2fsn" Sep 30 20:30:45 crc kubenswrapper[4919]: I0930 20:30:45.901854 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5scz\" (UniqueName: \"kubernetes.io/projected/5ea561dc-1efe-4e77-8b93-690b706e4125-kube-api-access-q5scz\") pod \"placement-db-create-9lx6b\" (UID: \"5ea561dc-1efe-4e77-8b93-690b706e4125\") " pod="openstack/placement-db-create-9lx6b" Sep 30 20:30:46 crc kubenswrapper[4919]: I0930 20:30:46.004060 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5scz\" (UniqueName: \"kubernetes.io/projected/5ea561dc-1efe-4e77-8b93-690b706e4125-kube-api-access-q5scz\") pod \"placement-db-create-9lx6b\" (UID: \"5ea561dc-1efe-4e77-8b93-690b706e4125\") " pod="openstack/placement-db-create-9lx6b" Sep 30 20:30:46 crc kubenswrapper[4919]: I0930 20:30:46.026112 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5scz\" (UniqueName: \"kubernetes.io/projected/5ea561dc-1efe-4e77-8b93-690b706e4125-kube-api-access-q5scz\") pod \"placement-db-create-9lx6b\" (UID: \"5ea561dc-1efe-4e77-8b93-690b706e4125\") " pod="openstack/placement-db-create-9lx6b" Sep 30 20:30:46 crc kubenswrapper[4919]: I0930 20:30:46.128495 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9lx6b" Sep 30 20:30:46 crc kubenswrapper[4919]: I0930 20:30:46.307625 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-f2fsn"] Sep 30 20:30:46 crc kubenswrapper[4919]: W0930 20:30:46.326673 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61966895_d5a4_4b29_8177_623b9f37ae45.slice/crio-7b7a98b39e134bc045ba00299739af924a52dd68f95c9b4744165e8d37d8bf68 WatchSource:0}: Error finding container 7b7a98b39e134bc045ba00299739af924a52dd68f95c9b4744165e8d37d8bf68: Status 404 returned error can't find the container with id 7b7a98b39e134bc045ba00299739af924a52dd68f95c9b4744165e8d37d8bf68 Sep 30 20:30:46 crc kubenswrapper[4919]: I0930 20:30:46.342132 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-f2fsn" event={"ID":"61966895-d5a4-4b29-8177-623b9f37ae45","Type":"ContainerStarted","Data":"7b7a98b39e134bc045ba00299739af924a52dd68f95c9b4744165e8d37d8bf68"} Sep 30 20:30:46 crc kubenswrapper[4919]: I0930 20:30:46.616747 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9lx6b"] Sep 30 20:30:46 crc kubenswrapper[4919]: I0930 20:30:46.921031 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:46 crc kubenswrapper[4919]: E0930 20:30:46.921285 4919 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 30 20:30:46 crc kubenswrapper[4919]: E0930 20:30:46.921317 4919 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 30 20:30:46 crc kubenswrapper[4919]: E0930 20:30:46.921378 4919 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift podName:30462126-2244-47cd-8076-12744196012d nodeName:}" failed. No retries permitted until 2025-09-30 20:30:54.921360964 +0000 UTC m=+1040.037394091 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift") pod "swift-storage-0" (UID: "30462126-2244-47cd-8076-12744196012d") : configmap "swift-ring-files" not found Sep 30 20:30:47 crc kubenswrapper[4919]: I0930 20:30:47.353685 4919 generic.go:334] "Generic (PLEG): container finished" podID="5ea561dc-1efe-4e77-8b93-690b706e4125" containerID="be303333ecd730045e05fec9bf10282740e6342a1d9d2a57cf045b4d90b731e1" exitCode=0 Sep 30 20:30:47 crc kubenswrapper[4919]: I0930 20:30:47.353764 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9lx6b" event={"ID":"5ea561dc-1efe-4e77-8b93-690b706e4125","Type":"ContainerDied","Data":"be303333ecd730045e05fec9bf10282740e6342a1d9d2a57cf045b4d90b731e1"} Sep 30 20:30:47 crc kubenswrapper[4919]: I0930 20:30:47.354036 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9lx6b" event={"ID":"5ea561dc-1efe-4e77-8b93-690b706e4125","Type":"ContainerStarted","Data":"e4582d0b0c68e99ddcc3dd716ee3a5e54e818251cb1f1826c57741a60ca53e68"} Sep 30 20:30:47 crc kubenswrapper[4919]: I0930 20:30:47.356921 4919 generic.go:334] "Generic (PLEG): container finished" podID="61966895-d5a4-4b29-8177-623b9f37ae45" containerID="63763b0f143e7d86d43bc46699ea91cd4ea82bfe9a534f66ef05b06aad53737d" exitCode=0 Sep 30 20:30:47 crc kubenswrapper[4919]: I0930 20:30:47.356975 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-f2fsn" event={"ID":"61966895-d5a4-4b29-8177-623b9f37ae45","Type":"ContainerDied","Data":"63763b0f143e7d86d43bc46699ea91cd4ea82bfe9a534f66ef05b06aad53737d"} Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.258384 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-v4hj5" Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.321121 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-lkb6x"] Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.321467 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" podUID="fedb355b-baae-4ce0-b68b-041578c10496" containerName="dnsmasq-dns" containerID="cri-o://8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1" gracePeriod=10 Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.669547 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-9lx6b" Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.869922 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5scz\" (UniqueName: \"kubernetes.io/projected/5ea561dc-1efe-4e77-8b93-690b706e4125-kube-api-access-q5scz\") pod \"5ea561dc-1efe-4e77-8b93-690b706e4125\" (UID: \"5ea561dc-1efe-4e77-8b93-690b706e4125\") " Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.876920 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ea561dc-1efe-4e77-8b93-690b706e4125-kube-api-access-q5scz" (OuterVolumeSpecName: "kube-api-access-q5scz") pod "5ea561dc-1efe-4e77-8b93-690b706e4125" (UID: "5ea561dc-1efe-4e77-8b93-690b706e4125"). InnerVolumeSpecName "kube-api-access-q5scz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.900424 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-f2fsn" Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.911867 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:48 crc kubenswrapper[4919]: I0930 20:30:48.972543 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5scz\" (UniqueName: \"kubernetes.io/projected/5ea561dc-1efe-4e77-8b93-690b706e4125-kube-api-access-q5scz\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.073089 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-dns-svc\") pod \"fedb355b-baae-4ce0-b68b-041578c10496\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.073124 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4ks7\" (UniqueName: \"kubernetes.io/projected/61966895-d5a4-4b29-8177-623b9f37ae45-kube-api-access-x4ks7\") pod \"61966895-d5a4-4b29-8177-623b9f37ae45\" (UID: \"61966895-d5a4-4b29-8177-623b9f37ae45\") " Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.073202 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-config\") pod \"fedb355b-baae-4ce0-b68b-041578c10496\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.073231 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-nb\") pod \"fedb355b-baae-4ce0-b68b-041578c10496\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.073253 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5hqw\" (UniqueName: \"kubernetes.io/projected/fedb355b-baae-4ce0-b68b-041578c10496-kube-api-access-c5hqw\") pod \"fedb355b-baae-4ce0-b68b-041578c10496\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.073337 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-sb\") pod \"fedb355b-baae-4ce0-b68b-041578c10496\" (UID: \"fedb355b-baae-4ce0-b68b-041578c10496\") " Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.077122 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fedb355b-baae-4ce0-b68b-041578c10496-kube-api-access-c5hqw" (OuterVolumeSpecName: "kube-api-access-c5hqw") pod "fedb355b-baae-4ce0-b68b-041578c10496" (UID: "fedb355b-baae-4ce0-b68b-041578c10496"). InnerVolumeSpecName "kube-api-access-c5hqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.077360 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61966895-d5a4-4b29-8177-623b9f37ae45-kube-api-access-x4ks7" (OuterVolumeSpecName: "kube-api-access-x4ks7") pod "61966895-d5a4-4b29-8177-623b9f37ae45" (UID: "61966895-d5a4-4b29-8177-623b9f37ae45"). InnerVolumeSpecName "kube-api-access-x4ks7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.112328 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fedb355b-baae-4ce0-b68b-041578c10496" (UID: "fedb355b-baae-4ce0-b68b-041578c10496"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.115872 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fedb355b-baae-4ce0-b68b-041578c10496" (UID: "fedb355b-baae-4ce0-b68b-041578c10496"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.119283 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fedb355b-baae-4ce0-b68b-041578c10496" (UID: "fedb355b-baae-4ce0-b68b-041578c10496"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.123155 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-config" (OuterVolumeSpecName: "config") pod "fedb355b-baae-4ce0-b68b-041578c10496" (UID: "fedb355b-baae-4ce0-b68b-041578c10496"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.175622 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.175672 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.175695 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5hqw\" (UniqueName: \"kubernetes.io/projected/fedb355b-baae-4ce0-b68b-041578c10496-kube-api-access-c5hqw\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.175716 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.175734 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fedb355b-baae-4ce0-b68b-041578c10496-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.175751 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4ks7\" (UniqueName: \"kubernetes.io/projected/61966895-d5a4-4b29-8177-623b9f37ae45-kube-api-access-x4ks7\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.374505 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9lx6b" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.374502 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9lx6b" event={"ID":"5ea561dc-1efe-4e77-8b93-690b706e4125","Type":"ContainerDied","Data":"e4582d0b0c68e99ddcc3dd716ee3a5e54e818251cb1f1826c57741a60ca53e68"} Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.375128 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4582d0b0c68e99ddcc3dd716ee3a5e54e818251cb1f1826c57741a60ca53e68" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.380291 4919 generic.go:334] "Generic (PLEG): container finished" podID="fedb355b-baae-4ce0-b68b-041578c10496" containerID="8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1" exitCode=0 Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.380347 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" event={"ID":"fedb355b-baae-4ce0-b68b-041578c10496","Type":"ContainerDied","Data":"8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1"} Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.380408 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" event={"ID":"fedb355b-baae-4ce0-b68b-041578c10496","Type":"ContainerDied","Data":"ed84059fe090d310171685c933038718606cc8ebccf2e37b558360799a25e653"} Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.380438 4919 scope.go:117] "RemoveContainer" containerID="8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.381345 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-lkb6x" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.390870 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-f2fsn" event={"ID":"61966895-d5a4-4b29-8177-623b9f37ae45","Type":"ContainerDied","Data":"7b7a98b39e134bc045ba00299739af924a52dd68f95c9b4744165e8d37d8bf68"} Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.390974 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b7a98b39e134bc045ba00299739af924a52dd68f95c9b4744165e8d37d8bf68" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.391190 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-f2fsn" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.436894 4919 scope.go:117] "RemoveContainer" containerID="b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.456347 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-lkb6x"] Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.461516 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-lkb6x"] Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.514609 4919 scope.go:117] "RemoveContainer" containerID="8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1" Sep 30 20:30:49 crc kubenswrapper[4919]: E0930 20:30:49.515105 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1\": container with ID starting with 8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1 not found: ID does not exist" containerID="8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.515141 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1"} err="failed to get container status \"8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1\": rpc error: code = NotFound desc = could not find container \"8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1\": container with ID starting with 8265206e4680837c9161dba0416dac9bc3e6f415d51c027650613e74a3a7d5c1 not found: ID does not exist" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.515167 4919 scope.go:117] "RemoveContainer" containerID="b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d" Sep 30 20:30:49 crc kubenswrapper[4919]: E0930 20:30:49.515408 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d\": container with ID starting with b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d not found: ID does not exist" containerID="b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.515433 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d"} err="failed to get container status \"b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d\": rpc error: code = NotFound desc = could not 
find container \"b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d\": container with ID starting with b724ec8fa528274a588503d7d1d9e8117230692bbbc7d4c0b09ed014ec9b0b5d not found: ID does not exist" Sep 30 20:30:49 crc kubenswrapper[4919]: I0930 20:30:49.646891 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fedb355b-baae-4ce0-b68b-041578c10496" path="/var/lib/kubelet/pods/fedb355b-baae-4ce0-b68b-041578c10496/volumes" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.282393 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-1482-account-create-lz8v2"] Sep 30 20:30:51 crc kubenswrapper[4919]: E0930 20:30:51.282989 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fedb355b-baae-4ce0-b68b-041578c10496" containerName="init" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.283002 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="fedb355b-baae-4ce0-b68b-041578c10496" containerName="init" Sep 30 20:30:51 crc kubenswrapper[4919]: E0930 20:30:51.283014 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61966895-d5a4-4b29-8177-623b9f37ae45" containerName="mariadb-database-create" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.283020 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="61966895-d5a4-4b29-8177-623b9f37ae45" containerName="mariadb-database-create" Sep 30 20:30:51 crc kubenswrapper[4919]: E0930 20:30:51.283036 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ea561dc-1efe-4e77-8b93-690b706e4125" containerName="mariadb-database-create" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.283041 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ea561dc-1efe-4e77-8b93-690b706e4125" containerName="mariadb-database-create" Sep 30 20:30:51 crc kubenswrapper[4919]: E0930 20:30:51.283065 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fedb355b-baae-4ce0-b68b-041578c10496" containerName="dnsmasq-dns" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.283070 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="fedb355b-baae-4ce0-b68b-041578c10496" containerName="dnsmasq-dns" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.283227 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="61966895-d5a4-4b29-8177-623b9f37ae45" containerName="mariadb-database-create" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.283268 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ea561dc-1efe-4e77-8b93-690b706e4125" containerName="mariadb-database-create" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.283281 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="fedb355b-baae-4ce0-b68b-041578c10496" containerName="dnsmasq-dns" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.283769 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1482-account-create-lz8v2" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.287388 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.313117 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1482-account-create-lz8v2"] Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.320758 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx65b\" (UniqueName: \"kubernetes.io/projected/4041a194-2042-411c-b58b-bec5e4ef9f2d-kube-api-access-fx65b\") pod \"glance-1482-account-create-lz8v2\" (UID: \"4041a194-2042-411c-b58b-bec5e4ef9f2d\") " pod="openstack/glance-1482-account-create-lz8v2" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.410118 4919 generic.go:334] "Generic (PLEG): container finished" podID="c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" containerID="be11470198a65ddb3151f2d05250f445fa65066cf6ac819b7033b147f752440b" exitCode=0 Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.410161 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-h6ch2" event={"ID":"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad","Type":"ContainerDied","Data":"be11470198a65ddb3151f2d05250f445fa65066cf6ac819b7033b147f752440b"} Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.425950 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx65b\" (UniqueName: \"kubernetes.io/projected/4041a194-2042-411c-b58b-bec5e4ef9f2d-kube-api-access-fx65b\") pod \"glance-1482-account-create-lz8v2\" (UID: \"4041a194-2042-411c-b58b-bec5e4ef9f2d\") " pod="openstack/glance-1482-account-create-lz8v2" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.450585 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx65b\" (UniqueName: \"kubernetes.io/projected/4041a194-2042-411c-b58b-bec5e4ef9f2d-kube-api-access-fx65b\") pod \"glance-1482-account-create-lz8v2\" (UID: \"4041a194-2042-411c-b58b-bec5e4ef9f2d\") " pod="openstack/glance-1482-account-create-lz8v2" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.606346 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-1482-account-create-lz8v2" Sep 30 20:30:51 crc kubenswrapper[4919]: I0930 20:30:51.909983 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-1482-account-create-lz8v2"] Sep 30 20:30:51 crc kubenswrapper[4919]: W0930 20:30:51.913077 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4041a194_2042_411c_b58b_bec5e4ef9f2d.slice/crio-126d5ca1d459cdeff4bcdcd8dc1625b14a8bd35f496afb27fd395cbb1ae41735 WatchSource:0}: Error finding container 126d5ca1d459cdeff4bcdcd8dc1625b14a8bd35f496afb27fd395cbb1ae41735: Status 404 returned error can't find the container with id 126d5ca1d459cdeff4bcdcd8dc1625b14a8bd35f496afb27fd395cbb1ae41735 Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.423400 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1482-account-create-lz8v2" event={"ID":"4041a194-2042-411c-b58b-bec5e4ef9f2d","Type":"ContainerStarted","Data":"126d5ca1d459cdeff4bcdcd8dc1625b14a8bd35f496afb27fd395cbb1ae41735"} Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.855067 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.954509 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-dispersionconf\") pod \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.954820 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjn2k\" (UniqueName: \"kubernetes.io/projected/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-kube-api-access-fjn2k\") pod \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.954883 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-combined-ca-bundle\") pod \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.954953 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-ring-data-devices\") pod \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.955008 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-swiftconf\") pod \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.955123 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-etc-swift\") pod \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.955173 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-scripts\") pod \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\" (UID: \"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad\") " Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.956171 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" (UID: "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.957095 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" (UID: "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.960597 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-kube-api-access-fjn2k" (OuterVolumeSpecName: "kube-api-access-fjn2k") pod "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" (UID: "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad"). InnerVolumeSpecName "kube-api-access-fjn2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.964877 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" (UID: "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.977758 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-scripts" (OuterVolumeSpecName: "scripts") pod "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" (UID: "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.980719 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" (UID: "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:52 crc kubenswrapper[4919]: I0930 20:30:52.995980 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" (UID: "c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.057185 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjn2k\" (UniqueName: \"kubernetes.io/projected/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-kube-api-access-fjn2k\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.057261 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.057288 4919 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.057313 4919 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.057338 4919 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.057359 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.057382 4919 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.437881 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-h6ch2" event={"ID":"c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad","Type":"ContainerDied","Data":"463908def5a34ab631c5a245d09ea9f068853101648e21d88bf7ccc6d93a7ee9"} Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.438368 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="463908def5a34ab631c5a245d09ea9f068853101648e21d88bf7ccc6d93a7ee9" Sep 30 20:30:53 crc kubenswrapper[4919]: I0930 20:30:53.437966 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-h6ch2" Sep 30 20:30:54 crc kubenswrapper[4919]: I0930 20:30:54.449343 4919 generic.go:334] "Generic (PLEG): container finished" podID="4041a194-2042-411c-b58b-bec5e4ef9f2d" containerID="679d3ff46779fc82e3def6d6fabf19e93627992c0c31b657073b2839d2708a5a" exitCode=0 Sep 30 20:30:54 crc kubenswrapper[4919]: I0930 20:30:54.450097 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1482-account-create-lz8v2" event={"ID":"4041a194-2042-411c-b58b-bec5e4ef9f2d","Type":"ContainerDied","Data":"679d3ff46779fc82e3def6d6fabf19e93627992c0c31b657073b2839d2708a5a"} Sep 30 20:30:54 crc kubenswrapper[4919]: I0930 20:30:54.986501 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:54 crc kubenswrapper[4919]: I0930 20:30:54.997489 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/30462126-2244-47cd-8076-12744196012d-etc-swift\") pod \"swift-storage-0\" (UID: \"30462126-2244-47cd-8076-12744196012d\") " pod="openstack/swift-storage-0" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.248681 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.652909 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-f84d-account-create-88k77"] Sep 30 20:30:55 crc kubenswrapper[4919]: E0930 20:30:55.653293 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" containerName="swift-ring-rebalance" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.653305 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" containerName="swift-ring-rebalance" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.653473 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad" containerName="swift-ring-rebalance" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.653999 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f84d-account-create-88k77" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.658288 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f84d-account-create-88k77"] Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.659023 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.804310 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7vfl\" (UniqueName: \"kubernetes.io/projected/63edfbc5-0bf7-48c8-87c3-94874e37e8d7-kube-api-access-w7vfl\") pod \"keystone-f84d-account-create-88k77\" (UID: \"63edfbc5-0bf7-48c8-87c3-94874e37e8d7\") " pod="openstack/keystone-f84d-account-create-88k77" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.819398 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.823384 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1482-account-create-lz8v2" Sep 30 20:30:55 crc kubenswrapper[4919]: W0930 20:30:55.824094 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30462126_2244_47cd_8076_12744196012d.slice/crio-b072913af456bf6319f70640821f0ae3e5c8d15ab9d3e8bbeca76599f8579032 WatchSource:0}: Error finding container b072913af456bf6319f70640821f0ae3e5c8d15ab9d3e8bbeca76599f8579032: Status 404 returned error can't find the container with id b072913af456bf6319f70640821f0ae3e5c8d15ab9d3e8bbeca76599f8579032 Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.905772 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx65b\" (UniqueName: \"kubernetes.io/projected/4041a194-2042-411c-b58b-bec5e4ef9f2d-kube-api-access-fx65b\") pod \"4041a194-2042-411c-b58b-bec5e4ef9f2d\" (UID: \"4041a194-2042-411c-b58b-bec5e4ef9f2d\") " Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.906181 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7vfl\" (UniqueName: \"kubernetes.io/projected/63edfbc5-0bf7-48c8-87c3-94874e37e8d7-kube-api-access-w7vfl\") pod \"keystone-f84d-account-create-88k77\" (UID: \"63edfbc5-0bf7-48c8-87c3-94874e37e8d7\") " pod="openstack/keystone-f84d-account-create-88k77" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.912818 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4041a194-2042-411c-b58b-bec5e4ef9f2d-kube-api-access-fx65b" (OuterVolumeSpecName: "kube-api-access-fx65b") pod "4041a194-2042-411c-b58b-bec5e4ef9f2d" (UID: "4041a194-2042-411c-b58b-bec5e4ef9f2d"). InnerVolumeSpecName "kube-api-access-fx65b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.925827 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7vfl\" (UniqueName: \"kubernetes.io/projected/63edfbc5-0bf7-48c8-87c3-94874e37e8d7-kube-api-access-w7vfl\") pod \"keystone-f84d-account-create-88k77\" (UID: \"63edfbc5-0bf7-48c8-87c3-94874e37e8d7\") " pod="openstack/keystone-f84d-account-create-88k77" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.946515 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-088d-account-create-lh9jg"] Sep 30 20:30:55 crc kubenswrapper[4919]: E0930 20:30:55.946885 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4041a194-2042-411c-b58b-bec5e4ef9f2d" containerName="mariadb-account-create" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.946903 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="4041a194-2042-411c-b58b-bec5e4ef9f2d" containerName="mariadb-account-create" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.947055 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="4041a194-2042-411c-b58b-bec5e4ef9f2d" containerName="mariadb-account-create" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.947607 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-088d-account-create-lh9jg" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.949920 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.956559 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-088d-account-create-lh9jg"] Sep 30 20:30:55 crc kubenswrapper[4919]: I0930 20:30:55.977976 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f84d-account-create-88k77" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.008186 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx65b\" (UniqueName: \"kubernetes.io/projected/4041a194-2042-411c-b58b-bec5e4ef9f2d-kube-api-access-fx65b\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.110337 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br9rh\" (UniqueName: \"kubernetes.io/projected/2b424fd3-c3b3-4bbf-8583-9e5788c0038b-kube-api-access-br9rh\") pod \"placement-088d-account-create-lh9jg\" (UID: \"2b424fd3-c3b3-4bbf-8583-9e5788c0038b\") " pod="openstack/placement-088d-account-create-lh9jg" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.211979 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br9rh\" (UniqueName: \"kubernetes.io/projected/2b424fd3-c3b3-4bbf-8583-9e5788c0038b-kube-api-access-br9rh\") pod \"placement-088d-account-create-lh9jg\" (UID: \"2b424fd3-c3b3-4bbf-8583-9e5788c0038b\") " pod="openstack/placement-088d-account-create-lh9jg" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.233318 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br9rh\" (UniqueName: \"kubernetes.io/projected/2b424fd3-c3b3-4bbf-8583-9e5788c0038b-kube-api-access-br9rh\") pod \"placement-088d-account-create-lh9jg\" (UID: \"2b424fd3-c3b3-4bbf-8583-9e5788c0038b\") " pod="openstack/placement-088d-account-create-lh9jg" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.269869 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-088d-account-create-lh9jg" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.336468 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jx7tr" podUID="8b4d1dc0-4d24-4128-a83b-9f37e7356309" containerName="ovn-controller" probeResult="failure" output=< Sep 30 20:30:56 crc kubenswrapper[4919]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 20:30:56 crc kubenswrapper[4919]: > Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.339149 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.392761 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-f84d-account-create-88k77"] Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.468814 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-1482-account-create-lz8v2" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.468835 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-1482-account-create-lz8v2" event={"ID":"4041a194-2042-411c-b58b-bec5e4ef9f2d","Type":"ContainerDied","Data":"126d5ca1d459cdeff4bcdcd8dc1625b14a8bd35f496afb27fd395cbb1ae41735"} Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.468877 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="126d5ca1d459cdeff4bcdcd8dc1625b14a8bd35f496afb27fd395cbb1ae41735" Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.480929 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"b072913af456bf6319f70640821f0ae3e5c8d15ab9d3e8bbeca76599f8579032"} Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.481874 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f84d-account-create-88k77" event={"ID":"63edfbc5-0bf7-48c8-87c3-94874e37e8d7","Type":"ContainerStarted","Data":"2463aa2a673b96e8add5f6a8d8feed49978c4bf8aa0e489c33211dd81cff76e0"} Sep 30 20:30:56 crc kubenswrapper[4919]: I0930 20:30:56.683927 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-088d-account-create-lh9jg"] Sep 30 20:30:57 crc kubenswrapper[4919]: I0930 20:30:57.499078 4919 generic.go:334] "Generic (PLEG): container finished" podID="63edfbc5-0bf7-48c8-87c3-94874e37e8d7" containerID="87bf306e2d40a1cd713990bd45fe106cef7d87b6b8be2220edad388b98e99617" exitCode=0 Sep 30 20:30:57 crc kubenswrapper[4919]: I0930 20:30:57.499351 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f84d-account-create-88k77" event={"ID":"63edfbc5-0bf7-48c8-87c3-94874e37e8d7","Type":"ContainerDied","Data":"87bf306e2d40a1cd713990bd45fe106cef7d87b6b8be2220edad388b98e99617"} Sep 30 20:30:57 crc kubenswrapper[4919]: I0930 20:30:57.503542 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-088d-account-create-lh9jg" event={"ID":"2b424fd3-c3b3-4bbf-8583-9e5788c0038b","Type":"ContainerStarted","Data":"f18973f3827b25a2c80ff87518e688a36ad51681aab6443b2d3f3241a54dbae1"} Sep 30 20:30:57 crc kubenswrapper[4919]: I0930 20:30:57.503624 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-088d-account-create-lh9jg" event={"ID":"2b424fd3-c3b3-4bbf-8583-9e5788c0038b","Type":"ContainerStarted","Data":"88c2342e065be5380d716df8ee28b0c5c9cd9fbd32b204f4a1c0b51176adf778"} Sep 30 20:30:57 crc kubenswrapper[4919]: I0930 20:30:57.507252 4919 generic.go:334] "Generic (PLEG): container finished" podID="831f0cec-e526-41e4-851f-139ffef9bea5" containerID="93e7c638bdaeea46130f5231c2e87f66af1eac382e728abedd402b1d49dd981f" exitCode=0 Sep 30 20:30:57 crc kubenswrapper[4919]: I0930 20:30:57.507331 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"831f0cec-e526-41e4-851f-139ffef9bea5","Type":"ContainerDied","Data":"93e7c638bdaeea46130f5231c2e87f66af1eac382e728abedd402b1d49dd981f"} Sep 30 20:30:57 crc kubenswrapper[4919]: I0930 20:30:57.601043 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-088d-account-create-lh9jg" podStartSLOduration=2.601016691 podStartE2EDuration="2.601016691s" podCreationTimestamp="2025-09-30 20:30:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:30:57.590675755 +0000 UTC m=+1042.706708922" watchObservedRunningTime="2025-09-30 20:30:57.601016691 +0000 UTC m=+1042.717049818" Sep 30 20:30:58 crc kubenswrapper[4919]: I0930 20:30:58.533305 4919 generic.go:334] "Generic (PLEG): container finished" podID="2b424fd3-c3b3-4bbf-8583-9e5788c0038b" containerID="f18973f3827b25a2c80ff87518e688a36ad51681aab6443b2d3f3241a54dbae1" exitCode=0 Sep 30 20:30:58 crc kubenswrapper[4919]: I0930 20:30:58.534159 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-088d-account-create-lh9jg" event={"ID":"2b424fd3-c3b3-4bbf-8583-9e5788c0038b","Type":"ContainerDied","Data":"f18973f3827b25a2c80ff87518e688a36ad51681aab6443b2d3f3241a54dbae1"} Sep 30 20:30:58 crc kubenswrapper[4919]: I0930 20:30:58.862831 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-f84d-account-create-88k77" Sep 30 20:30:58 crc kubenswrapper[4919]: I0930 20:30:58.960916 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7vfl\" (UniqueName: \"kubernetes.io/projected/63edfbc5-0bf7-48c8-87c3-94874e37e8d7-kube-api-access-w7vfl\") pod \"63edfbc5-0bf7-48c8-87c3-94874e37e8d7\" (UID: \"63edfbc5-0bf7-48c8-87c3-94874e37e8d7\") " Sep 30 20:30:58 crc kubenswrapper[4919]: I0930 20:30:58.965497 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63edfbc5-0bf7-48c8-87c3-94874e37e8d7-kube-api-access-w7vfl" (OuterVolumeSpecName: "kube-api-access-w7vfl") pod "63edfbc5-0bf7-48c8-87c3-94874e37e8d7" (UID: "63edfbc5-0bf7-48c8-87c3-94874e37e8d7"). InnerVolumeSpecName "kube-api-access-w7vfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.063511 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7vfl\" (UniqueName: \"kubernetes.io/projected/63edfbc5-0bf7-48c8-87c3-94874e37e8d7-kube-api-access-w7vfl\") on node \"crc\" DevicePath \"\"" Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.546635 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-f84d-account-create-88k77" event={"ID":"63edfbc5-0bf7-48c8-87c3-94874e37e8d7","Type":"ContainerDied","Data":"2463aa2a673b96e8add5f6a8d8feed49978c4bf8aa0e489c33211dd81cff76e0"} Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.546924 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2463aa2a673b96e8add5f6a8d8feed49978c4bf8aa0e489c33211dd81cff76e0" Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.547002 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-f84d-account-create-88k77" Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.550912 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"831f0cec-e526-41e4-851f-139ffef9bea5","Type":"ContainerStarted","Data":"38a5d3175d1cc4769ad239682d75e814c0498c53fc9db30bea0240c22b357552"} Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.551952 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.555150 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"e7ecc5be04bd78389a27c2c27bc43cdeba257f77da47df6615c4bcf82b19b572"} Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.555202 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"43802aaf824420c299090672af8e9b98d6c3b7a5b85c0fcef0a8cd8299cff4fc"} Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.555279 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"fe13291729fb639fec4e2e90321dedb79802fc4f4ebeea143b705b4ec68b1daa"} Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.555294 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"84438bc813c1de65ffe695baf1ffe9ddc82615642ebbc00a0553f8fc60332e2d"} Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.579583 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=39.270469346 podStartE2EDuration="1m8.57930455s" podCreationTimestamp="2025-09-30 20:29:51 +0000 UTC" firstStartedPulling="2025-09-30 20:29:52.975320383 +0000 UTC m=+978.091353510" lastFinishedPulling="2025-09-30 20:30:22.284155547 +0000 UTC m=+1007.400188714" observedRunningTime="2025-09-30 20:30:59.57268193 +0000 UTC m=+1044.688715057" watchObservedRunningTime="2025-09-30 20:30:59.57930455 +0000 UTC m=+1044.695337677" Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.907343 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-088d-account-create-lh9jg" Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.990017 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br9rh\" (UniqueName: \"kubernetes.io/projected/2b424fd3-c3b3-4bbf-8583-9e5788c0038b-kube-api-access-br9rh\") pod \"2b424fd3-c3b3-4bbf-8583-9e5788c0038b\" (UID: \"2b424fd3-c3b3-4bbf-8583-9e5788c0038b\") " Sep 30 20:30:59 crc kubenswrapper[4919]: I0930 20:30:59.998563 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b424fd3-c3b3-4bbf-8583-9e5788c0038b-kube-api-access-br9rh" (OuterVolumeSpecName: "kube-api-access-br9rh") pod "2b424fd3-c3b3-4bbf-8583-9e5788c0038b" (UID: "2b424fd3-c3b3-4bbf-8583-9e5788c0038b"). InnerVolumeSpecName "kube-api-access-br9rh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:00 crc kubenswrapper[4919]: I0930 20:31:00.091428 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br9rh\" (UniqueName: \"kubernetes.io/projected/2b424fd3-c3b3-4bbf-8583-9e5788c0038b-kube-api-access-br9rh\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:00 crc kubenswrapper[4919]: I0930 20:31:00.575343 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"575bcb491a3781e7d72253b9e76d6ab3f66660f87be15b868b7373f668290cc3"} Sep 30 20:31:00 crc kubenswrapper[4919]: I0930 20:31:00.579455 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-088d-account-create-lh9jg" Sep 30 20:31:00 crc kubenswrapper[4919]: I0930 20:31:00.579526 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-088d-account-create-lh9jg" event={"ID":"2b424fd3-c3b3-4bbf-8583-9e5788c0038b","Type":"ContainerDied","Data":"88c2342e065be5380d716df8ee28b0c5c9cd9fbd32b204f4a1c0b51176adf778"} Sep 30 20:31:00 crc kubenswrapper[4919]: I0930 20:31:00.579559 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88c2342e065be5380d716df8ee28b0c5c9cd9fbd32b204f4a1c0b51176adf778" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.342568 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jx7tr" podUID="8b4d1dc0-4d24-4128-a83b-9f37e7356309" containerName="ovn-controller" probeResult="failure" output=< Sep 30 20:31:01 crc kubenswrapper[4919]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 30 20:31:01 crc kubenswrapper[4919]: > Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.349674 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ppjcf" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.438176 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-8zx8k"] Sep 30 20:31:01 crc kubenswrapper[4919]: E0930 20:31:01.438519 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63edfbc5-0bf7-48c8-87c3-94874e37e8d7" containerName="mariadb-account-create" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.438535 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="63edfbc5-0bf7-48c8-87c3-94874e37e8d7" containerName="mariadb-account-create" Sep 30 20:31:01 crc kubenswrapper[4919]: E0930 20:31:01.438552 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b424fd3-c3b3-4bbf-8583-9e5788c0038b" containerName="mariadb-account-create" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.438558 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b424fd3-c3b3-4bbf-8583-9e5788c0038b" containerName="mariadb-account-create" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.438725 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b424fd3-c3b3-4bbf-8583-9e5788c0038b" containerName="mariadb-account-create" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.438742 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="63edfbc5-0bf7-48c8-87c3-94874e37e8d7" containerName="mariadb-account-create" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.439274 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.441560 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.441794 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-gmsjp" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.453427 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8zx8k"] Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.517810 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-config-data\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.517895 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-combined-ca-bundle\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.517926 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8tqq\" (UniqueName: \"kubernetes.io/projected/15afefc9-4042-464a-ae52-966e5b6f0ffb-kube-api-access-t8tqq\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.517951 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-db-sync-config-data\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.591529 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"3bb4d71ab351e6a9047615d07ebca62d3bba749ac62accdbfd20e9b65a080af8"} Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.591571 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"02d24964afff60e8fbd06fe2d55dc87f53efcca7bc3f4580366c771948e921ec"} Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.591583 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"e492571d79a8f8fd81e72d805ff3f4e5b0f00655e6783cb9d961d48e00688c1d"} Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.593317 4919 generic.go:334] "Generic (PLEG): container finished" podID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerID="2a760f9f87e08a592631ecf86976cc2522b7c1236f4dead9c79de8addb7bc69a" exitCode=0 Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.593400 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"567de3cf-1a4f-426d-b4d5-da78ead6e923","Type":"ContainerDied","Data":"2a760f9f87e08a592631ecf86976cc2522b7c1236f4dead9c79de8addb7bc69a"} Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.620208 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-config-data\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.620297 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-combined-ca-bundle\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.620331 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8tqq\" (UniqueName: \"kubernetes.io/projected/15afefc9-4042-464a-ae52-966e5b6f0ffb-kube-api-access-t8tqq\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.620359 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-db-sync-config-data\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.621512 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jx7tr-config-rgz54"] Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.622799 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.625037 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-combined-ca-bundle\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.625608 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-config-data\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.627769 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.634399 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-db-sync-config-data\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.646746 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8tqq\" (UniqueName: \"kubernetes.io/projected/15afefc9-4042-464a-ae52-966e5b6f0ffb-kube-api-access-t8tqq\") pod \"glance-db-sync-8zx8k\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") " pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.663124 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jx7tr-config-rgz54"] Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.722015 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-log-ovn\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.722093 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.722169 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqcb7\" (UniqueName: \"kubernetes.io/projected/fc21a491-20da-4b24-aa0b-e7a2c9545445-kube-api-access-nqcb7\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.722223 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-additional-scripts\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " 
pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.722287 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run-ovn\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.722360 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-scripts\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.771192 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8zx8k" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.823958 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-scripts\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.824059 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-log-ovn\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.824089 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.824134 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqcb7\" (UniqueName: \"kubernetes.io/projected/fc21a491-20da-4b24-aa0b-e7a2c9545445-kube-api-access-nqcb7\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.824171 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-additional-scripts\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.824328 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run-ovn\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.824732 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run-ovn\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.824819 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-log-ovn\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.825193 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.826000 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-additional-scripts\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.827013 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-scripts\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:01 crc kubenswrapper[4919]: I0930 20:31:01.848564 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqcb7\" (UniqueName: \"kubernetes.io/projected/fc21a491-20da-4b24-aa0b-e7a2c9545445-kube-api-access-nqcb7\") pod \"ovn-controller-jx7tr-config-rgz54\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:02 crc kubenswrapper[4919]: I0930 20:31:02.091628 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:02 crc kubenswrapper[4919]: I0930 20:31:02.605454 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"567de3cf-1a4f-426d-b4d5-da78ead6e923","Type":"ContainerStarted","Data":"60c286ef96249077e4f4a7963fc707dec52b96f550928c462035531baa60c398"} Sep 30 20:31:02 crc kubenswrapper[4919]: I0930 20:31:02.898959 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jx7tr-config-rgz54"] Sep 30 20:31:03 crc kubenswrapper[4919]: I0930 20:31:03.063880 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-8zx8k"] Sep 30 20:31:03 crc kubenswrapper[4919]: I0930 20:31:03.615266 4919 generic.go:334] "Generic (PLEG): container finished" podID="fc21a491-20da-4b24-aa0b-e7a2c9545445" containerID="f4d0b64baf8db1cdca9e217a3018da4c1ed4bacb6f0da768aabf52273e0b8039" exitCode=0 Sep 30 20:31:03 crc kubenswrapper[4919]: I0930 20:31:03.615390 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jx7tr-config-rgz54" event={"ID":"fc21a491-20da-4b24-aa0b-e7a2c9545445","Type":"ContainerDied","Data":"f4d0b64baf8db1cdca9e217a3018da4c1ed4bacb6f0da768aabf52273e0b8039"} Sep 30 20:31:03 crc kubenswrapper[4919]: I0930 20:31:03.615640 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jx7tr-config-rgz54" event={"ID":"fc21a491-20da-4b24-aa0b-e7a2c9545445","Type":"ContainerStarted","Data":"548434f979b35be1291c568b29cb11b81fab26637499d5cfe3f83c71fd2bbd0a"} Sep 30 20:31:03 crc kubenswrapper[4919]: I0930 20:31:03.616705 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8zx8k" event={"ID":"15afefc9-4042-464a-ae52-966e5b6f0ffb","Type":"ContainerStarted","Data":"fdcfb41d3e1c9bec388f6d47b95f6c4878e18a40df840549887240f745873714"} Sep 30 20:31:03 crc kubenswrapper[4919]: I0930 20:31:03.616828 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:31:03 crc kubenswrapper[4919]: I0930 20:31:03.666837 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371964.187956 podStartE2EDuration="1m12.666819844s" podCreationTimestamp="2025-09-30 20:29:51 +0000 UTC" firstStartedPulling="2025-09-30 20:29:53.354438569 +0000 UTC m=+978.470471696" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:03.662319105 +0000 UTC m=+1048.778352272" watchObservedRunningTime="2025-09-30 20:31:03.666819844 +0000 UTC m=+1048.782852971" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.355343 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.397569 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqcb7\" (UniqueName: \"kubernetes.io/projected/fc21a491-20da-4b24-aa0b-e7a2c9545445-kube-api-access-nqcb7\") pod \"fc21a491-20da-4b24-aa0b-e7a2c9545445\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.397610 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-log-ovn\") pod \"fc21a491-20da-4b24-aa0b-e7a2c9545445\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.397664 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-additional-scripts\") pod \"fc21a491-20da-4b24-aa0b-e7a2c9545445\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.397751 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run\") pod \"fc21a491-20da-4b24-aa0b-e7a2c9545445\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.397782 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-scripts\") pod \"fc21a491-20da-4b24-aa0b-e7a2c9545445\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.397833 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run-ovn\") pod \"fc21a491-20da-4b24-aa0b-e7a2c9545445\" (UID: \"fc21a491-20da-4b24-aa0b-e7a2c9545445\") " Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.398366 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "fc21a491-20da-4b24-aa0b-e7a2c9545445" (UID: "fc21a491-20da-4b24-aa0b-e7a2c9545445"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.400661 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run" (OuterVolumeSpecName: "var-run") pod "fc21a491-20da-4b24-aa0b-e7a2c9545445" (UID: "fc21a491-20da-4b24-aa0b-e7a2c9545445"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.400745 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "fc21a491-20da-4b24-aa0b-e7a2c9545445" (UID: "fc21a491-20da-4b24-aa0b-e7a2c9545445"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.401466 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-scripts" (OuterVolumeSpecName: "scripts") pod "fc21a491-20da-4b24-aa0b-e7a2c9545445" (UID: "fc21a491-20da-4b24-aa0b-e7a2c9545445"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.403564 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "fc21a491-20da-4b24-aa0b-e7a2c9545445" (UID: "fc21a491-20da-4b24-aa0b-e7a2c9545445"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.409333 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc21a491-20da-4b24-aa0b-e7a2c9545445-kube-api-access-nqcb7" (OuterVolumeSpecName: "kube-api-access-nqcb7") pod "fc21a491-20da-4b24-aa0b-e7a2c9545445" (UID: "fc21a491-20da-4b24-aa0b-e7a2c9545445"). InnerVolumeSpecName "kube-api-access-nqcb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.499763 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqcb7\" (UniqueName: \"kubernetes.io/projected/fc21a491-20da-4b24-aa0b-e7a2c9545445-kube-api-access-nqcb7\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.499811 4919 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.499825 4919 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.499840 4919 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.499852 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc21a491-20da-4b24-aa0b-e7a2c9545445-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.499865 4919 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fc21a491-20da-4b24-aa0b-e7a2c9545445-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.640353 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jx7tr-config-rgz54" Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.641382 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jx7tr-config-rgz54" event={"ID":"fc21a491-20da-4b24-aa0b-e7a2c9545445","Type":"ContainerDied","Data":"548434f979b35be1291c568b29cb11b81fab26637499d5cfe3f83c71fd2bbd0a"} Sep 30 20:31:05 crc kubenswrapper[4919]: I0930 20:31:05.641431 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="548434f979b35be1291c568b29cb11b81fab26637499d5cfe3f83c71fd2bbd0a" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.339501 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-jx7tr" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.519317 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jx7tr-config-rgz54"] Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.529802 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jx7tr-config-rgz54"] Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.560378 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jx7tr-config-sxtmn"] Sep 30 20:31:06 crc kubenswrapper[4919]: E0930 20:31:06.560722 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc21a491-20da-4b24-aa0b-e7a2c9545445" containerName="ovn-config" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.560737 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc21a491-20da-4b24-aa0b-e7a2c9545445" containerName="ovn-config" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.560912 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc21a491-20da-4b24-aa0b-e7a2c9545445" containerName="ovn-config" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.563502 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.565478 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.586315 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jx7tr-config-sxtmn"] Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.633475 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-log-ovn\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.633793 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.633824 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfcj9\" (UniqueName: \"kubernetes.io/projected/4a3cfecf-41c9-4061-9abb-946cd33eba64-kube-api-access-qfcj9\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.633848 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-additional-scripts\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.633873 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run-ovn\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.633935 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-scripts\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.653783 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"dd24c869a5452428e58817ab996590b138de1e11010dc9545608163ee4e796cd"} Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.653830 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"430819ea0a86bd45f40e462d7a05b52c85d4fcecec232ecc12ea03e3092210a4"} Sep 30 20:31:06 crc kubenswrapper[4919]: 
I0930 20:31:06.653843 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"5eabe10dfb6e5bf1f9042565adfb16731e188f2a47da0e36176ab736957d2f98"} Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.736255 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-log-ovn\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.736380 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.736445 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfcj9\" (UniqueName: \"kubernetes.io/projected/4a3cfecf-41c9-4061-9abb-946cd33eba64-kube-api-access-qfcj9\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.736478 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-additional-scripts\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.736509 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run-ovn\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.736585 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-scripts\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.737411 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-log-ovn\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.737463 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.738832 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" 
(UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run-ovn\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.738901 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-scripts\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.744446 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-additional-scripts\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.766814 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfcj9\" (UniqueName: \"kubernetes.io/projected/4a3cfecf-41c9-4061-9abb-946cd33eba64-kube-api-access-qfcj9\") pod \"ovn-controller-jx7tr-config-sxtmn\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:06 crc kubenswrapper[4919]: I0930 20:31:06.902448 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:07 crc kubenswrapper[4919]: I0930 20:31:07.480201 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jx7tr-config-sxtmn"] Sep 30 20:31:07 crc kubenswrapper[4919]: W0930 20:31:07.491828 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a3cfecf_41c9_4061_9abb_946cd33eba64.slice/crio-8a4594b23c297ecf1276ecb01d7c1de31aa1c7a8b579ba05708ea40745d4d8f6 WatchSource:0}: Error finding container 8a4594b23c297ecf1276ecb01d7c1de31aa1c7a8b579ba05708ea40745d4d8f6: Status 404 returned error can't find the container with id 8a4594b23c297ecf1276ecb01d7c1de31aa1c7a8b579ba05708ea40745d4d8f6 Sep 30 20:31:07 crc kubenswrapper[4919]: I0930 20:31:07.650450 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc21a491-20da-4b24-aa0b-e7a2c9545445" path="/var/lib/kubelet/pods/fc21a491-20da-4b24-aa0b-e7a2c9545445/volumes" Sep 30 20:31:07 crc kubenswrapper[4919]: I0930 20:31:07.670656 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"d4a46c4a1f478668d423fe56e01f19702410324ca018f543b757b36d6bc0a2c1"} Sep 30 20:31:07 crc kubenswrapper[4919]: I0930 20:31:07.670705 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"0bf5652fe1da44528c9d58537584eb1f4835ef43da2f0589c786543b0f48ac48"} Sep 30 20:31:07 crc kubenswrapper[4919]: I0930 20:31:07.670719 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"02ba7641b73485b316bc03306360e3dd18cde0d8ccd498741b78a3e43a764c2c"} Sep 30 20:31:07 crc kubenswrapper[4919]: I0930 20:31:07.672232 4919 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jx7tr-config-sxtmn" event={"ID":"4a3cfecf-41c9-4061-9abb-946cd33eba64","Type":"ContainerStarted","Data":"8a4594b23c297ecf1276ecb01d7c1de31aa1c7a8b579ba05708ea40745d4d8f6"} Sep 30 20:31:08 crc kubenswrapper[4919]: I0930 20:31:08.707368 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"30462126-2244-47cd-8076-12744196012d","Type":"ContainerStarted","Data":"d97e5fd5ffb9a2081b99474f2416c1e8fe4f0d6af80a4eb2f4c3402805c29ff2"} Sep 30 20:31:08 crc kubenswrapper[4919]: I0930 20:31:08.712909 4919 generic.go:334] "Generic (PLEG): container finished" podID="4a3cfecf-41c9-4061-9abb-946cd33eba64" containerID="6764c23eea435c9d1cb7462a9d7b55aa5bd4466c48656175559734d83b4496c8" exitCode=0 Sep 30 20:31:08 crc kubenswrapper[4919]: I0930 20:31:08.712949 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jx7tr-config-sxtmn" event={"ID":"4a3cfecf-41c9-4061-9abb-946cd33eba64","Type":"ContainerDied","Data":"6764c23eea435c9d1cb7462a9d7b55aa5bd4466c48656175559734d83b4496c8"} Sep 30 20:31:08 crc kubenswrapper[4919]: I0930 20:31:08.770566 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=22.281701338 podStartE2EDuration="31.770548689s" podCreationTimestamp="2025-09-30 20:30:37 +0000 UTC" firstStartedPulling="2025-09-30 20:30:55.828297678 +0000 UTC m=+1040.944330805" lastFinishedPulling="2025-09-30 20:31:05.317145029 +0000 UTC m=+1050.433178156" observedRunningTime="2025-09-30 20:31:08.754490158 +0000 UTC m=+1053.870523295" watchObservedRunningTime="2025-09-30 20:31:08.770548689 +0000 UTC m=+1053.886581816" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.020708 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-n8vc2"] Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.021979 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.023964 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.033301 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-n8vc2"] Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.204869 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-config\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.204909 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.204975 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrjnv\" (UniqueName: \"kubernetes.io/projected/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-kube-api-access-wrjnv\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.205001 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.205629 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.205671 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.307512 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrjnv\" (UniqueName: \"kubernetes.io/projected/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-kube-api-access-wrjnv\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.307561 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: 
\"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.307627 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.307651 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.307710 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-config\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.307742 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.308648 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-nb\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.308756 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-swift-storage-0\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.308866 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-config\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.308908 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-svc\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.308944 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-sb\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: 
I0930 20:31:09.328119 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrjnv\" (UniqueName: \"kubernetes.io/projected/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-kube-api-access-wrjnv\") pod \"dnsmasq-dns-77585f5f8c-n8vc2\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.405596 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:09 crc kubenswrapper[4919]: I0930 20:31:09.884128 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-n8vc2"] Sep 30 20:31:09 crc kubenswrapper[4919]: W0930 20:31:09.905736 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76fffd4e_e9d7_4dad_8b29_58d71bc2215e.slice/crio-2227da2729d3c56dd1349fa4422ca0ffcb44bca3043de5444d4046ccf32af930 WatchSource:0}: Error finding container 2227da2729d3c56dd1349fa4422ca0ffcb44bca3043de5444d4046ccf32af930: Status 404 returned error can't find the container with id 2227da2729d3c56dd1349fa4422ca0ffcb44bca3043de5444d4046ccf32af930 Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.005199 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.122967 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-additional-scripts\") pod \"4a3cfecf-41c9-4061-9abb-946cd33eba64\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.123123 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-scripts\") pod \"4a3cfecf-41c9-4061-9abb-946cd33eba64\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.123174 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run-ovn\") pod \"4a3cfecf-41c9-4061-9abb-946cd33eba64\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.123302 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfcj9\" (UniqueName: \"kubernetes.io/projected/4a3cfecf-41c9-4061-9abb-946cd33eba64-kube-api-access-qfcj9\") pod \"4a3cfecf-41c9-4061-9abb-946cd33eba64\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.123357 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run\") pod \"4a3cfecf-41c9-4061-9abb-946cd33eba64\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.123375 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-log-ovn\") pod \"4a3cfecf-41c9-4061-9abb-946cd33eba64\" (UID: \"4a3cfecf-41c9-4061-9abb-946cd33eba64\") " Sep 30 
20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.123749 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "4a3cfecf-41c9-4061-9abb-946cd33eba64" (UID: "4a3cfecf-41c9-4061-9abb-946cd33eba64"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.123786 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "4a3cfecf-41c9-4061-9abb-946cd33eba64" (UID: "4a3cfecf-41c9-4061-9abb-946cd33eba64"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.123971 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "4a3cfecf-41c9-4061-9abb-946cd33eba64" (UID: "4a3cfecf-41c9-4061-9abb-946cd33eba64"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.124040 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run" (OuterVolumeSpecName: "var-run") pod "4a3cfecf-41c9-4061-9abb-946cd33eba64" (UID: "4a3cfecf-41c9-4061-9abb-946cd33eba64"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.124281 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-scripts" (OuterVolumeSpecName: "scripts") pod "4a3cfecf-41c9-4061-9abb-946cd33eba64" (UID: "4a3cfecf-41c9-4061-9abb-946cd33eba64"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.128639 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a3cfecf-41c9-4061-9abb-946cd33eba64-kube-api-access-qfcj9" (OuterVolumeSpecName: "kube-api-access-qfcj9") pod "4a3cfecf-41c9-4061-9abb-946cd33eba64" (UID: "4a3cfecf-41c9-4061-9abb-946cd33eba64"). InnerVolumeSpecName "kube-api-access-qfcj9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.229031 4919 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.229064 4919 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.229076 4919 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.229086 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4a3cfecf-41c9-4061-9abb-946cd33eba64-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.229128 4919 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/4a3cfecf-41c9-4061-9abb-946cd33eba64-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.229136 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfcj9\" (UniqueName: \"kubernetes.io/projected/4a3cfecf-41c9-4061-9abb-946cd33eba64-kube-api-access-qfcj9\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.739295 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jx7tr-config-sxtmn" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.739328 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jx7tr-config-sxtmn" event={"ID":"4a3cfecf-41c9-4061-9abb-946cd33eba64","Type":"ContainerDied","Data":"8a4594b23c297ecf1276ecb01d7c1de31aa1c7a8b579ba05708ea40745d4d8f6"} Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.739665 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a4594b23c297ecf1276ecb01d7c1de31aa1c7a8b579ba05708ea40745d4d8f6" Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.741645 4919 generic.go:334] "Generic (PLEG): container finished" podID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerID="2653ce2cc5a72e4b96caa75f96969ea5326a5b48c523874688f9df7e466f3312" exitCode=0 Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.741676 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" event={"ID":"76fffd4e-e9d7-4dad-8b29-58d71bc2215e","Type":"ContainerDied","Data":"2653ce2cc5a72e4b96caa75f96969ea5326a5b48c523874688f9df7e466f3312"} Sep 30 20:31:10 crc kubenswrapper[4919]: I0930 20:31:10.741709 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" event={"ID":"76fffd4e-e9d7-4dad-8b29-58d71bc2215e","Type":"ContainerStarted","Data":"2227da2729d3c56dd1349fa4422ca0ffcb44bca3043de5444d4046ccf32af930"} Sep 30 20:31:11 crc kubenswrapper[4919]: I0930 20:31:11.133968 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jx7tr-config-sxtmn"] Sep 30 20:31:11 crc kubenswrapper[4919]: I0930 20:31:11.147258 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/ovn-controller-jx7tr-config-sxtmn"] Sep 30 20:31:11 crc kubenswrapper[4919]: I0930 20:31:11.657490 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a3cfecf-41c9-4061-9abb-946cd33eba64" path="/var/lib/kubelet/pods/4a3cfecf-41c9-4061-9abb-946cd33eba64/volumes" Sep 30 20:31:11 crc kubenswrapper[4919]: I0930 20:31:11.752335 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" event={"ID":"76fffd4e-e9d7-4dad-8b29-58d71bc2215e","Type":"ContainerStarted","Data":"c3a43acba6cf4087bbf1acca2e78acf679ce1d36e777c315d95b371efc1b47e9"} Sep 30 20:31:11 crc kubenswrapper[4919]: I0930 20:31:11.752913 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:11 crc kubenswrapper[4919]: I0930 20:31:11.775775 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" podStartSLOduration=3.7757575660000002 podStartE2EDuration="3.775757566s" podCreationTimestamp="2025-09-30 20:31:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:11.774336985 +0000 UTC m=+1056.890370112" watchObservedRunningTime="2025-09-30 20:31:11.775757566 +0000 UTC m=+1056.891790703" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.409460 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.707728 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-4ppnv"] Sep 30 20:31:12 crc kubenswrapper[4919]: E0930 20:31:12.708115 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a3cfecf-41c9-4061-9abb-946cd33eba64" containerName="ovn-config" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.708132 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a3cfecf-41c9-4061-9abb-946cd33eba64" containerName="ovn-config" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.708305 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a3cfecf-41c9-4061-9abb-946cd33eba64" containerName="ovn-config" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.708832 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4ppnv" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.724363 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4ppnv"] Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.783326 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x949\" (UniqueName: \"kubernetes.io/projected/06a29aeb-b634-4850-96ad-559fa2318076-kube-api-access-9x949\") pod \"barbican-db-create-4ppnv\" (UID: \"06a29aeb-b634-4850-96ad-559fa2318076\") " pod="openstack/barbican-db-create-4ppnv" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.793262 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.803711 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-wc9vm"] Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.805159 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-wc9vm" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.819015 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-wc9vm"] Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.884486 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x949\" (UniqueName: \"kubernetes.io/projected/06a29aeb-b634-4850-96ad-559fa2318076-kube-api-access-9x949\") pod \"barbican-db-create-4ppnv\" (UID: \"06a29aeb-b634-4850-96ad-559fa2318076\") " pod="openstack/barbican-db-create-4ppnv" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.884607 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mst75\" (UniqueName: \"kubernetes.io/projected/8548a93e-e608-46bd-a4da-32876305fe67-kube-api-access-mst75\") pod \"cinder-db-create-wc9vm\" (UID: \"8548a93e-e608-46bd-a4da-32876305fe67\") " pod="openstack/cinder-db-create-wc9vm" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.927055 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x949\" (UniqueName: \"kubernetes.io/projected/06a29aeb-b634-4850-96ad-559fa2318076-kube-api-access-9x949\") pod \"barbican-db-create-4ppnv\" (UID: \"06a29aeb-b634-4850-96ad-559fa2318076\") " pod="openstack/barbican-db-create-4ppnv" Sep 30 20:31:12 crc kubenswrapper[4919]: I0930 20:31:12.986951 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mst75\" (UniqueName: \"kubernetes.io/projected/8548a93e-e608-46bd-a4da-32876305fe67-kube-api-access-mst75\") pod \"cinder-db-create-wc9vm\" (UID: \"8548a93e-e608-46bd-a4da-32876305fe67\") " pod="openstack/cinder-db-create-wc9vm" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.008605 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-xp5t9"] Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.009721 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xp5t9" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.011920 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mst75\" (UniqueName: \"kubernetes.io/projected/8548a93e-e608-46bd-a4da-32876305fe67-kube-api-access-mst75\") pod \"cinder-db-create-wc9vm\" (UID: \"8548a93e-e608-46bd-a4da-32876305fe67\") " pod="openstack/cinder-db-create-wc9vm" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.021003 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-xp5t9"] Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.029773 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4ppnv" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.088584 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46fvr\" (UniqueName: \"kubernetes.io/projected/2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7-kube-api-access-46fvr\") pod \"neutron-db-create-xp5t9\" (UID: \"2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7\") " pod="openstack/neutron-db-create-xp5t9" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.127789 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-wc9vm" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.143085 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-xxfbx"] Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.144571 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.147532 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-76kff" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.147609 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.147632 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.147715 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.158349 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xxfbx"] Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.190407 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46fvr\" (UniqueName: \"kubernetes.io/projected/2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7-kube-api-access-46fvr\") pod \"neutron-db-create-xp5t9\" (UID: \"2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7\") " pod="openstack/neutron-db-create-xp5t9" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.208613 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46fvr\" (UniqueName: \"kubernetes.io/projected/2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7-kube-api-access-46fvr\") pod \"neutron-db-create-xp5t9\" (UID: \"2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7\") " pod="openstack/neutron-db-create-xp5t9" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.291921 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-combined-ca-bundle\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.291999 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-config-data\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.292045 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkk8h\" (UniqueName: \"kubernetes.io/projected/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-kube-api-access-zkk8h\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.356383 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-xp5t9" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.392969 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkk8h\" (UniqueName: \"kubernetes.io/projected/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-kube-api-access-zkk8h\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.393093 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-combined-ca-bundle\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.393132 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-config-data\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.396774 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-config-data\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.409207 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkk8h\" (UniqueName: \"kubernetes.io/projected/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-kube-api-access-zkk8h\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.417692 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-combined-ca-bundle\") pod \"keystone-db-sync-xxfbx\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") " pod="openstack/keystone-db-sync-xxfbx" Sep 30 20:31:13 crc kubenswrapper[4919]: I0930 20:31:13.464716 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xxfbx"
Sep 30 20:31:19 crc kubenswrapper[4919]: I0930 20:31:19.408980 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2"
Sep 30 20:31:19 crc kubenswrapper[4919]: I0930 20:31:19.486126 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v4hj5"]
Sep 30 20:31:19 crc kubenswrapper[4919]: I0930 20:31:19.486367 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-v4hj5" podUID="3f366a95-3a67-445a-9682-ec419dc21deb" containerName="dnsmasq-dns" containerID="cri-o://5dfa852199e81247e0f8bb47782146020480d46d9987d54f3751d19074a32a84" gracePeriod=10
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.730976 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-wc9vm"]
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.742081 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-xp5t9"]
Sep 30 20:31:20 crc kubenswrapper[4919]: W0930 20:31:19.763992 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8548a93e_e608_46bd_a4da_32876305fe67.slice/crio-84298d51d1e49aa521604facec3f1800902b7a127ff51efa84417e4ff70bc880 WatchSource:0}: Error finding container 84298d51d1e49aa521604facec3f1800902b7a127ff51efa84417e4ff70bc880: Status 404 returned error can't find the container with id 84298d51d1e49aa521604facec3f1800902b7a127ff51efa84417e4ff70bc880
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.773654 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xxfbx"]
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.864768 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-4ppnv"]
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.873849 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xxfbx" event={"ID":"bbc82d06-dc8f-4fc1-884f-43213a1b4d36","Type":"ContainerStarted","Data":"09acd274f2cd5d40bae1cfb7095fdaa9cf03038f24d9b2a90da48690b0d26da8"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.874802 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-wc9vm" event={"ID":"8548a93e-e608-46bd-a4da-32876305fe67","Type":"ContainerStarted","Data":"84298d51d1e49aa521604facec3f1800902b7a127ff51efa84417e4ff70bc880"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.876750 4919 generic.go:334] "Generic (PLEG): container finished" podID="3f366a95-3a67-445a-9682-ec419dc21deb" containerID="5dfa852199e81247e0f8bb47782146020480d46d9987d54f3751d19074a32a84" exitCode=0
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.876789 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v4hj5" event={"ID":"3f366a95-3a67-445a-9682-ec419dc21deb","Type":"ContainerDied","Data":"5dfa852199e81247e0f8bb47782146020480d46d9987d54f3751d19074a32a84"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:19.878325 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xp5t9" event={"ID":"2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7","Type":"ContainerStarted","Data":"662b754af1e72666b95395cf8d5f129b32c60d09935db8cec24558e989cdd474"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.412204 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-v4hj5"
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.550425 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-config\") pod \"3f366a95-3a67-445a-9682-ec419dc21deb\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") "
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.550791 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-nb\") pod \"3f366a95-3a67-445a-9682-ec419dc21deb\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") "
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.550827 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bvnt\" (UniqueName: \"kubernetes.io/projected/3f366a95-3a67-445a-9682-ec419dc21deb-kube-api-access-6bvnt\") pod \"3f366a95-3a67-445a-9682-ec419dc21deb\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") "
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.550920 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-dns-svc\") pod \"3f366a95-3a67-445a-9682-ec419dc21deb\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") "
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.550961 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-sb\") pod \"3f366a95-3a67-445a-9682-ec419dc21deb\" (UID: \"3f366a95-3a67-445a-9682-ec419dc21deb\") "
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.572401 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f366a95-3a67-445a-9682-ec419dc21deb-kube-api-access-6bvnt" (OuterVolumeSpecName: "kube-api-access-6bvnt") pod "3f366a95-3a67-445a-9682-ec419dc21deb" (UID: "3f366a95-3a67-445a-9682-ec419dc21deb"). InnerVolumeSpecName "kube-api-access-6bvnt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.599058 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3f366a95-3a67-445a-9682-ec419dc21deb" (UID: "3f366a95-3a67-445a-9682-ec419dc21deb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.608413 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-config" (OuterVolumeSpecName: "config") pod "3f366a95-3a67-445a-9682-ec419dc21deb" (UID: "3f366a95-3a67-445a-9682-ec419dc21deb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.617725 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3f366a95-3a67-445a-9682-ec419dc21deb" (UID: "3f366a95-3a67-445a-9682-ec419dc21deb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.620186 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3f366a95-3a67-445a-9682-ec419dc21deb" (UID: "3f366a95-3a67-445a-9682-ec419dc21deb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.652527 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.652551 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bvnt\" (UniqueName: \"kubernetes.io/projected/3f366a95-3a67-445a-9682-ec419dc21deb-kube-api-access-6bvnt\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.652561 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.652571 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.652579 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f366a95-3a67-445a-9682-ec419dc21deb-config\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.886430 4919 generic.go:334] "Generic (PLEG): container finished" podID="2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7" containerID="d965fe69f11531bdf4fab973c86209af712bd6280b255f2906cd980a346d06ea" exitCode=0
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.886499 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xp5t9" event={"ID":"2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7","Type":"ContainerDied","Data":"d965fe69f11531bdf4fab973c86209af712bd6280b255f2906cd980a346d06ea"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.887785 4919 generic.go:334] "Generic (PLEG): container finished" podID="06a29aeb-b634-4850-96ad-559fa2318076" containerID="f1279c802da8a7639ef68e428cae15e02484fda82bc523aa06d66c57bd0e6b00" exitCode=0
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.887856 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4ppnv" event={"ID":"06a29aeb-b634-4850-96ad-559fa2318076","Type":"ContainerDied","Data":"f1279c802da8a7639ef68e428cae15e02484fda82bc523aa06d66c57bd0e6b00"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.887884 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4ppnv" event={"ID":"06a29aeb-b634-4850-96ad-559fa2318076","Type":"ContainerStarted","Data":"b6b80969b521de4cac5bacc80ab4d5b5da352394cbbdac547d805f3a1c160216"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.888989 4919 generic.go:334] "Generic (PLEG): container finished" podID="8548a93e-e608-46bd-a4da-32876305fe67" containerID="fdfdd656f1044b84a2301fd80854d1b556da700a1929d71dc30e9f26695bd605" exitCode=0
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.889030 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-wc9vm" event={"ID":"8548a93e-e608-46bd-a4da-32876305fe67","Type":"ContainerDied","Data":"fdfdd656f1044b84a2301fd80854d1b556da700a1929d71dc30e9f26695bd605"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.890685 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8zx8k" event={"ID":"15afefc9-4042-464a-ae52-966e5b6f0ffb","Type":"ContainerStarted","Data":"30edca9fd6d10cfaadb28401f43560f9697b1c5720dabb4524ef279ae3d85d3e"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.895895 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-v4hj5" event={"ID":"3f366a95-3a67-445a-9682-ec419dc21deb","Type":"ContainerDied","Data":"24f63fdd75f1dbbfcfb9c663f7e17bee603fb1e7e46639dd35bf455c8d4ace2b"}
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.895936 4919 scope.go:117] "RemoveContainer" containerID="5dfa852199e81247e0f8bb47782146020480d46d9987d54f3751d19074a32a84"
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.896034 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-v4hj5"
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.931787 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-8zx8k" podStartSLOduration=3.780793357 podStartE2EDuration="19.931764643s" podCreationTimestamp="2025-09-30 20:31:01 +0000 UTC" firstStartedPulling="2025-09-30 20:31:03.082716393 +0000 UTC m=+1048.198749520" lastFinishedPulling="2025-09-30 20:31:19.233687629 +0000 UTC m=+1064.349720806" observedRunningTime="2025-09-30 20:31:20.925326858 +0000 UTC m=+1066.041359985" watchObservedRunningTime="2025-09-30 20:31:20.931764643 +0000 UTC m=+1066.047797770"
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.936105 4919 scope.go:117] "RemoveContainer" containerID="c3abd68d4514d515945d35438eb611d1dfd7314646f9adcbe57f46fb4a785ca8"
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.982954 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v4hj5"]
Sep 30 20:31:20 crc kubenswrapper[4919]: I0930 20:31:20.992358 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-v4hj5"]
Sep 30 20:31:21 crc kubenswrapper[4919]: I0930 20:31:21.643052 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f366a95-3a67-445a-9682-ec419dc21deb" path="/var/lib/kubelet/pods/3f366a95-3a67-445a-9682-ec419dc21deb/volumes"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.717156 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4ppnv"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.726370 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-wc9vm"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.733469 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xp5t9"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.830958 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9x949\" (UniqueName: \"kubernetes.io/projected/06a29aeb-b634-4850-96ad-559fa2318076-kube-api-access-9x949\") pod \"06a29aeb-b634-4850-96ad-559fa2318076\" (UID: \"06a29aeb-b634-4850-96ad-559fa2318076\") "
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.831126 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46fvr\" (UniqueName: \"kubernetes.io/projected/2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7-kube-api-access-46fvr\") pod \"2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7\" (UID: \"2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7\") "
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.831269 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mst75\" (UniqueName: \"kubernetes.io/projected/8548a93e-e608-46bd-a4da-32876305fe67-kube-api-access-mst75\") pod \"8548a93e-e608-46bd-a4da-32876305fe67\" (UID: \"8548a93e-e608-46bd-a4da-32876305fe67\") "
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.836810 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8548a93e-e608-46bd-a4da-32876305fe67-kube-api-access-mst75" (OuterVolumeSpecName: "kube-api-access-mst75") pod "8548a93e-e608-46bd-a4da-32876305fe67" (UID: "8548a93e-e608-46bd-a4da-32876305fe67"). InnerVolumeSpecName "kube-api-access-mst75". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.836897 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06a29aeb-b634-4850-96ad-559fa2318076-kube-api-access-9x949" (OuterVolumeSpecName: "kube-api-access-9x949") pod "06a29aeb-b634-4850-96ad-559fa2318076" (UID: "06a29aeb-b634-4850-96ad-559fa2318076"). InnerVolumeSpecName "kube-api-access-9x949". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.839270 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7-kube-api-access-46fvr" (OuterVolumeSpecName: "kube-api-access-46fvr") pod "2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7" (UID: "2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7"). InnerVolumeSpecName "kube-api-access-46fvr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.933162 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mst75\" (UniqueName: \"kubernetes.io/projected/8548a93e-e608-46bd-a4da-32876305fe67-kube-api-access-mst75\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.933236 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9x949\" (UniqueName: \"kubernetes.io/projected/06a29aeb-b634-4850-96ad-559fa2318076-kube-api-access-9x949\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.933255 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46fvr\" (UniqueName: \"kubernetes.io/projected/2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7-kube-api-access-46fvr\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.944673 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-xp5t9" event={"ID":"2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7","Type":"ContainerDied","Data":"662b754af1e72666b95395cf8d5f129b32c60d09935db8cec24558e989cdd474"}
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.944701 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-xp5t9"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.944708 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="662b754af1e72666b95395cf8d5f129b32c60d09935db8cec24558e989cdd474"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.946810 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-4ppnv" event={"ID":"06a29aeb-b634-4850-96ad-559fa2318076","Type":"ContainerDied","Data":"b6b80969b521de4cac5bacc80ab4d5b5da352394cbbdac547d805f3a1c160216"}
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.946859 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6b80969b521de4cac5bacc80ab4d5b5da352394cbbdac547d805f3a1c160216"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.946892 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-4ppnv"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.948565 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xxfbx" event={"ID":"bbc82d06-dc8f-4fc1-884f-43213a1b4d36","Type":"ContainerStarted","Data":"8960f60e742ad16e0744f3e91c0ef748e0e41594619634fde7ad11556f711ce9"}
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.951795 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-wc9vm" event={"ID":"8548a93e-e608-46bd-a4da-32876305fe67","Type":"ContainerDied","Data":"84298d51d1e49aa521604facec3f1800902b7a127ff51efa84417e4ff70bc880"}
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.951821 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84298d51d1e49aa521604facec3f1800902b7a127ff51efa84417e4ff70bc880"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.951859 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-wc9vm"
Sep 30 20:31:24 crc kubenswrapper[4919]: I0930 20:31:24.985203 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-xxfbx" podStartSLOduration=7.253769781 podStartE2EDuration="11.985175559s" podCreationTimestamp="2025-09-30 20:31:13 +0000 UTC" firstStartedPulling="2025-09-30 20:31:19.848546551 +0000 UTC m=+1064.964579678" lastFinishedPulling="2025-09-30 20:31:24.579952289 +0000 UTC m=+1069.695985456" observedRunningTime="2025-09-30 20:31:24.975824831 +0000 UTC m=+1070.091857988" watchObservedRunningTime="2025-09-30 20:31:24.985175559 +0000 UTC m=+1070.101208726"
Sep 30 20:31:26 crc kubenswrapper[4919]: I0930 20:31:26.061630 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:31:26 crc kubenswrapper[4919]: I0930 20:31:26.061987 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:31:26 crc kubenswrapper[4919]: I0930 20:31:26.969575 4919 generic.go:334] "Generic (PLEG): container finished" podID="15afefc9-4042-464a-ae52-966e5b6f0ffb" containerID="30edca9fd6d10cfaadb28401f43560f9697b1c5720dabb4524ef279ae3d85d3e" exitCode=0
Sep 30 20:31:26 crc kubenswrapper[4919]: I0930 20:31:26.969623 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8zx8k" event={"ID":"15afefc9-4042-464a-ae52-966e5b6f0ffb","Type":"ContainerDied","Data":"30edca9fd6d10cfaadb28401f43560f9697b1c5720dabb4524ef279ae3d85d3e"}
Sep 30 20:31:27 crc kubenswrapper[4919]: I0930 20:31:27.986096 4919 generic.go:334] "Generic (PLEG): container finished" podID="bbc82d06-dc8f-4fc1-884f-43213a1b4d36" containerID="8960f60e742ad16e0744f3e91c0ef748e0e41594619634fde7ad11556f711ce9" exitCode=0
Sep 30 20:31:27 crc kubenswrapper[4919]: I0930 20:31:27.986177 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xxfbx" event={"ID":"bbc82d06-dc8f-4fc1-884f-43213a1b4d36","Type":"ContainerDied","Data":"8960f60e742ad16e0744f3e91c0ef748e0e41594619634fde7ad11556f711ce9"}
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.495612 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8zx8k"
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.600199 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-config-data\") pod \"15afefc9-4042-464a-ae52-966e5b6f0ffb\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") "
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.600339 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-combined-ca-bundle\") pod \"15afefc9-4042-464a-ae52-966e5b6f0ffb\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") "
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.600412 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-db-sync-config-data\") pod \"15afefc9-4042-464a-ae52-966e5b6f0ffb\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") "
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.600459 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8tqq\" (UniqueName: \"kubernetes.io/projected/15afefc9-4042-464a-ae52-966e5b6f0ffb-kube-api-access-t8tqq\") pod \"15afefc9-4042-464a-ae52-966e5b6f0ffb\" (UID: \"15afefc9-4042-464a-ae52-966e5b6f0ffb\") "
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.606600 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "15afefc9-4042-464a-ae52-966e5b6f0ffb" (UID: "15afefc9-4042-464a-ae52-966e5b6f0ffb"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.609096 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15afefc9-4042-464a-ae52-966e5b6f0ffb-kube-api-access-t8tqq" (OuterVolumeSpecName: "kube-api-access-t8tqq") pod "15afefc9-4042-464a-ae52-966e5b6f0ffb" (UID: "15afefc9-4042-464a-ae52-966e5b6f0ffb"). InnerVolumeSpecName "kube-api-access-t8tqq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.632743 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15afefc9-4042-464a-ae52-966e5b6f0ffb" (UID: "15afefc9-4042-464a-ae52-966e5b6f0ffb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.683910 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-config-data" (OuterVolumeSpecName: "config-data") pod "15afefc9-4042-464a-ae52-966e5b6f0ffb" (UID: "15afefc9-4042-464a-ae52-966e5b6f0ffb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.702514 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.702541 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.702556 4919 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/15afefc9-4042-464a-ae52-966e5b6f0ffb-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:28 crc kubenswrapper[4919]: I0930 20:31:28.702570 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8tqq\" (UniqueName: \"kubernetes.io/projected/15afefc9-4042-464a-ae52-966e5b6f0ffb-kube-api-access-t8tqq\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.003937 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-8zx8k"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.004708 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-8zx8k" event={"ID":"15afefc9-4042-464a-ae52-966e5b6f0ffb","Type":"ContainerDied","Data":"fdcfb41d3e1c9bec388f6d47b95f6c4878e18a40df840549887240f745873714"}
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.004743 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fdcfb41d3e1c9bec388f6d47b95f6c4878e18a40df840549887240f745873714"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.405492 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g72gx"]
Sep 30 20:31:29 crc kubenswrapper[4919]: E0930 20:31:29.406003 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15afefc9-4042-464a-ae52-966e5b6f0ffb" containerName="glance-db-sync"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406020 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="15afefc9-4042-464a-ae52-966e5b6f0ffb" containerName="glance-db-sync"
Sep 30 20:31:29 crc kubenswrapper[4919]: E0930 20:31:29.406038 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06a29aeb-b634-4850-96ad-559fa2318076" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406045 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="06a29aeb-b634-4850-96ad-559fa2318076" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: E0930 20:31:29.406053 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406059 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: E0930 20:31:29.406072 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8548a93e-e608-46bd-a4da-32876305fe67" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406077 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8548a93e-e608-46bd-a4da-32876305fe67" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: E0930 20:31:29.406091 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f366a95-3a67-445a-9682-ec419dc21deb" containerName="init"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406096 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f366a95-3a67-445a-9682-ec419dc21deb" containerName="init"
Sep 30 20:31:29 crc kubenswrapper[4919]: E0930 20:31:29.406110 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f366a95-3a67-445a-9682-ec419dc21deb" containerName="dnsmasq-dns"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406117 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f366a95-3a67-445a-9682-ec419dc21deb" containerName="dnsmasq-dns"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406273 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="15afefc9-4042-464a-ae52-966e5b6f0ffb" containerName="glance-db-sync"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406286 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406295 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f366a95-3a67-445a-9682-ec419dc21deb" containerName="dnsmasq-dns"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406326 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="06a29aeb-b634-4850-96ad-559fa2318076" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.406340 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8548a93e-e608-46bd-a4da-32876305fe67" containerName="mariadb-database-create"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.409517 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.418595 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g72gx"]
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.476997 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xxfbx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.514349 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.514428 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-config\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.514484 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.514589 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xt87\" (UniqueName: \"kubernetes.io/projected/8418e0c4-c86d-45ed-bef9-550c30e7f796-kube-api-access-8xt87\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.514651 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.514686 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615280 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkk8h\" (UniqueName: \"kubernetes.io/projected/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-kube-api-access-zkk8h\") pod \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") "
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615332 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-combined-ca-bundle\") pod \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") "
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615446 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-config-data\") pod \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\" (UID: \"bbc82d06-dc8f-4fc1-884f-43213a1b4d36\") "
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615683 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xt87\" (UniqueName: \"kubernetes.io/projected/8418e0c4-c86d-45ed-bef9-550c30e7f796-kube-api-access-8xt87\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615732 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615758 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615789 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615815 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-config\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.615846 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.617055 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-config\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.617089 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-svc\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.617601 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-swift-storage-0\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.617660 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-sb\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.617791 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-nb\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.627919 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-kube-api-access-zkk8h" (OuterVolumeSpecName: "kube-api-access-zkk8h") pod "bbc82d06-dc8f-4fc1-884f-43213a1b4d36" (UID: "bbc82d06-dc8f-4fc1-884f-43213a1b4d36"). InnerVolumeSpecName "kube-api-access-zkk8h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.631011 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xt87\" (UniqueName: \"kubernetes.io/projected/8418e0c4-c86d-45ed-bef9-550c30e7f796-kube-api-access-8xt87\") pod \"dnsmasq-dns-7ff5475cc9-g72gx\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.637420 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbc82d06-dc8f-4fc1-884f-43213a1b4d36" (UID: "bbc82d06-dc8f-4fc1-884f-43213a1b4d36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.658260 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-config-data" (OuterVolumeSpecName: "config-data") pod "bbc82d06-dc8f-4fc1-884f-43213a1b4d36" (UID: "bbc82d06-dc8f-4fc1-884f-43213a1b4d36"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.717988 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.718025 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.718038 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkk8h\" (UniqueName: \"kubernetes.io/projected/bbc82d06-dc8f-4fc1-884f-43213a1b4d36-kube-api-access-zkk8h\") on node \"crc\" DevicePath \"\""
Sep 30 20:31:29 crc kubenswrapper[4919]: I0930 20:31:29.787735 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.020453 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xxfbx" event={"ID":"bbc82d06-dc8f-4fc1-884f-43213a1b4d36","Type":"ContainerDied","Data":"09acd274f2cd5d40bae1cfb7095fdaa9cf03038f24d9b2a90da48690b0d26da8"}
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.020802 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09acd274f2cd5d40bae1cfb7095fdaa9cf03038f24d9b2a90da48690b0d26da8"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.020868 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xxfbx"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.242102 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g72gx"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.314921 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g72gx"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.387588 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"]
Sep 30 20:31:30 crc kubenswrapper[4919]: E0930 20:31:30.388094 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc82d06-dc8f-4fc1-884f-43213a1b4d36" containerName="keystone-db-sync"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.388108 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc82d06-dc8f-4fc1-884f-43213a1b4d36" containerName="keystone-db-sync"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.388330 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbc82d06-dc8f-4fc1-884f-43213a1b4d36" containerName="keystone-db-sync"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.398410 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.425424 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.451252 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-qd4pk"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.466595 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.470987 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.471414 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.471559 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-qd4pk"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.473758 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-76kff"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.490656 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.524824 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.528342 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.532679 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-config-data\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.532730 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qltj2\" (UniqueName: \"kubernetes.io/projected/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-kube-api-access-qltj2\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.532795 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-credential-keys\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.532856 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.532887 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-config\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.532932 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.532965 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.533008 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-combined-ca-bundle\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.533268 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.533299 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-scripts\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.539600 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-fernet-keys\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.539664 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nlz4\" (UniqueName: \"kubernetes.io/projected/483b2110-904f-42b0-a634-81b7ee4f6642-kube-api-access-6nlz4\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.539781 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.534277 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.534479 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.640890 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-scripts\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641148 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-fernet-keys\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641172 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nlz4\" (UniqueName: \"kubernetes.io/projected/483b2110-904f-42b0-a634-81b7ee4f6642-kube-api-access-6nlz4\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641209 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641239 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641258 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-log-httpd\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641277 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641294 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-scripts\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641317 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-run-httpd\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641332 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjxc6\" (UniqueName: \"kubernetes.io/projected/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-kube-api-access-kjxc6\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641358 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-config-data\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641376 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-config-data\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641397 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qltj2\" (UniqueName: \"kubernetes.io/projected/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-kube-api-access-qltj2\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641418 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-credential-keys\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641442 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641456 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-config\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641472 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641493 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.641512 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-combined-ca-bundle\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.645615 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-combined-ca-bundle\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.646368 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-sb\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.646886 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-config\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.647434 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-nb\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.647910 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-swift-storage-0\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.649138 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-credential-keys\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.649623 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-svc\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.649764 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-fernet-keys\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.652746 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-config-data\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.655334 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-scripts\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.685004 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qltj2\" (UniqueName: \"kubernetes.io/projected/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-kube-api-access-qltj2\") pod \"dnsmasq-dns-5c5cc7c5ff-hkmpg\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.686955 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nlz4\" (UniqueName: \"kubernetes.io/projected/483b2110-904f-42b0-a634-81b7ee4f6642-kube-api-access-6nlz4\") pod \"keystone-bootstrap-qd4pk\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " pod="openstack/keystone-bootstrap-qd4pk"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.717014 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-vnrf4"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.718318 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-vnrf4"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.721664 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.721853 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-pjdx6"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.721980 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.732630 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-vnrf4"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747098 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-scripts\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747138 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-config-data\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747236 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-combined-ca-bundle\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747274 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7db423ab-427e-425f-a5d4-10ec71302c12-logs\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747320 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747339 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747363 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-log-httpd\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747390 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-scripts\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747411 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-run-httpd\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747426 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjxc6\" (UniqueName: \"kubernetes.io/projected/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-kube-api-access-kjxc6\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747442 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vpqv\" (UniqueName: \"kubernetes.io/projected/7db423ab-427e-425f-a5d4-10ec71302c12-kube-api-access-9vpqv\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.747475 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-config-data\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.752090 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-run-httpd\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.752493 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-log-httpd\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0"
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.755888 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"]
Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.756600 4919 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.757695 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.774001 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-scripts\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.779886 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.785030 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-config-data\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.797951 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjxc6\" (UniqueName: \"kubernetes.io/projected/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-kube-api-access-kjxc6\") pod \"ceilometer-0\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " pod="openstack/ceilometer-0" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.817321 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-rflv5"] Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.818879 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.832870 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-qd4pk" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.838484 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-rflv5"] Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.851801 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-scripts\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.851990 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-config-data\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852069 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbfdp\" (UniqueName: \"kubernetes.io/projected/40d00a75-5b72-4341-8618-1abb614b53cb-kube-api-access-dbfdp\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852164 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852262 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852334 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-combined-ca-bundle\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852407 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7db423ab-427e-425f-a5d4-10ec71302c12-logs\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852485 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852570 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852647 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vpqv\" (UniqueName: \"kubernetes.io/projected/7db423ab-427e-425f-a5d4-10ec71302c12-kube-api-access-9vpqv\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.852716 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-config\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.855896 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7db423ab-427e-425f-a5d4-10ec71302c12-logs\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.856908 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-combined-ca-bundle\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.858342 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-scripts\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.858508 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-config-data\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.859888 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.873750 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vpqv\" (UniqueName: \"kubernetes.io/projected/7db423ab-427e-425f-a5d4-10ec71302c12-kube-api-access-9vpqv\") pod \"placement-db-sync-vnrf4\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.954685 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-config\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.954996 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbfdp\" (UniqueName: \"kubernetes.io/projected/40d00a75-5b72-4341-8618-1abb614b53cb-kube-api-access-dbfdp\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.955026 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.955050 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.955108 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.955136 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.955895 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-sb\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.957096 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-nb\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: 
I0930 20:31:30.957681 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-svc\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.958439 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-swift-storage-0\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.959222 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-config\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:30 crc kubenswrapper[4919]: I0930 20:31:30.983533 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbfdp\" (UniqueName: \"kubernetes.io/projected/40d00a75-5b72-4341-8618-1abb614b53cb-kube-api-access-dbfdp\") pod \"dnsmasq-dns-8b5c85b87-rflv5\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.032148 4919 generic.go:334] "Generic (PLEG): container finished" podID="8418e0c4-c86d-45ed-bef9-550c30e7f796" containerID="46a709b08471cfc922143a45f6e864aec9139712a21e97b8a9fd1b3662d34f7f" exitCode=0 Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.032192 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx" event={"ID":"8418e0c4-c86d-45ed-bef9-550c30e7f796","Type":"ContainerDied","Data":"46a709b08471cfc922143a45f6e864aec9139712a21e97b8a9fd1b3662d34f7f"} Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.032232 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx" event={"ID":"8418e0c4-c86d-45ed-bef9-550c30e7f796","Type":"ContainerStarted","Data":"cbf0607f9c818cad8f2f4b29b721d5d759fa7cd3e04ef465fe36d21b67dff16e"} Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.157850 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.164583 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.247136 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"] Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.354686 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.372487 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-qd4pk"] Sep 30 20:31:31 crc kubenswrapper[4919]: W0930 20:31:31.391612 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod483b2110_904f_42b0_a634_81b7ee4f6642.slice/crio-5265a77faa52764721c63defefbb8822b83b73c31b11008824dd4b4ee8b576a0 WatchSource:0}: Error finding container 5265a77faa52764721c63defefbb8822b83b73c31b11008824dd4b4ee8b576a0: Status 404 returned error can't find the container with id 5265a77faa52764721c63defefbb8822b83b73c31b11008824dd4b4ee8b576a0 Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.427359 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:31 crc kubenswrapper[4919]: E0930 20:31:31.427892 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8418e0c4-c86d-45ed-bef9-550c30e7f796" containerName="init" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.427945 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8418e0c4-c86d-45ed-bef9-550c30e7f796" containerName="init" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.428188 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8418e0c4-c86d-45ed-bef9-550c30e7f796" containerName="init" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.429533 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.456554 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.457350 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.457536 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-gmsjp" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.469608 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.506569 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.531816 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.533793 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.536816 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.546310 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.586695 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-nb\") pod \"8418e0c4-c86d-45ed-bef9-550c30e7f796\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.586745 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-config\") pod \"8418e0c4-c86d-45ed-bef9-550c30e7f796\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.586863 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xt87\" (UniqueName: \"kubernetes.io/projected/8418e0c4-c86d-45ed-bef9-550c30e7f796-kube-api-access-8xt87\") pod \"8418e0c4-c86d-45ed-bef9-550c30e7f796\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.586898 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-sb\") pod \"8418e0c4-c86d-45ed-bef9-550c30e7f796\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.586930 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-swift-storage-0\") pod \"8418e0c4-c86d-45ed-bef9-550c30e7f796\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.587000 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-svc\") pod \"8418e0c4-c86d-45ed-bef9-550c30e7f796\" (UID: \"8418e0c4-c86d-45ed-bef9-550c30e7f796\") " Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.587265 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.587303 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x64fw\" (UniqueName: \"kubernetes.io/projected/42ca60af-7b90-492d-9f42-ba79af142539-kube-api-access-x64fw\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.587338 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-logs\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.587380 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-scripts\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.587419 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.587476 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-config-data\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.587554 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.601457 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8418e0c4-c86d-45ed-bef9-550c30e7f796-kube-api-access-8xt87" (OuterVolumeSpecName: "kube-api-access-8xt87") pod "8418e0c4-c86d-45ed-bef9-550c30e7f796" (UID: "8418e0c4-c86d-45ed-bef9-550c30e7f796"). InnerVolumeSpecName "kube-api-access-8xt87". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.617000 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8418e0c4-c86d-45ed-bef9-550c30e7f796" (UID: "8418e0c4-c86d-45ed-bef9-550c30e7f796"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.620168 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8418e0c4-c86d-45ed-bef9-550c30e7f796" (UID: "8418e0c4-c86d-45ed-bef9-550c30e7f796"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.635636 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8418e0c4-c86d-45ed-bef9-550c30e7f796" (UID: "8418e0c4-c86d-45ed-bef9-550c30e7f796"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.646009 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-config" (OuterVolumeSpecName: "config") pod "8418e0c4-c86d-45ed-bef9-550c30e7f796" (UID: "8418e0c4-c86d-45ed-bef9-550c30e7f796"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.667332 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8418e0c4-c86d-45ed-bef9-550c30e7f796" (UID: "8418e0c4-c86d-45ed-bef9-550c30e7f796"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688474 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688535 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688553 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x64fw\" (UniqueName: \"kubernetes.io/projected/42ca60af-7b90-492d-9f42-ba79af142539-kube-api-access-x64fw\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688576 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-logs\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688606 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-scripts\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688635 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688671 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688690 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-config-data\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688712 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jmlg\" (UniqueName: \"kubernetes.io/projected/0f3ff6e5-7345-4844-89cd-460533571f13-kube-api-access-9jmlg\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688740 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688770 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688797 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-logs\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688814 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688839 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688882 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688893 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688902 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xt87\" (UniqueName: 
\"kubernetes.io/projected/8418e0c4-c86d-45ed-bef9-550c30e7f796-kube-api-access-8xt87\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688913 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688923 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.688934 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8418e0c4-c86d-45ed-bef9-550c30e7f796-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.689410 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.689839 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.690650 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-logs\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.702082 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.704171 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-scripts\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.708871 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-config-data\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.716674 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x64fw\" (UniqueName: \"kubernetes.io/projected/42ca60af-7b90-492d-9f42-ba79af142539-kube-api-access-x64fw\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " 
pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.773134 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.784320 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-vnrf4"] Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.789823 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.789861 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.789995 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.790028 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jmlg\" (UniqueName: \"kubernetes.io/projected/0f3ff6e5-7345-4844-89cd-460533571f13-kube-api-access-9jmlg\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.790051 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.790134 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.790166 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-logs\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.790604 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-logs\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") 
" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.791285 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.791536 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.792135 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-rflv5"] Sep 30 20:31:31 crc kubenswrapper[4919]: W0930 20:31:31.794752 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7db423ab_427e_425f_a5d4_10ec71302c12.slice/crio-e5eb01d85eadd08b4bb52e97a2e3657b9c60644b56aa5a5b54be103d2ce0e325 WatchSource:0}: Error finding container e5eb01d85eadd08b4bb52e97a2e3657b9c60644b56aa5a5b54be103d2ce0e325: Status 404 returned error can't find the container with id e5eb01d85eadd08b4bb52e97a2e3657b9c60644b56aa5a5b54be103d2ce0e325 Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.795733 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.796430 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.803229 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.805491 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jmlg\" (UniqueName: \"kubernetes.io/projected/0f3ff6e5-7345-4844-89cd-460533571f13-kube-api-access-9jmlg\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.821874 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:31 crc kubenswrapper[4919]: I0930 20:31:31.994442 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.025542 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.050504 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" event={"ID":"40d00a75-5b72-4341-8618-1abb614b53cb","Type":"ContainerStarted","Data":"eb5b2c5fab103429a461f49d6ddd266b9242079e49e40d5c5e7e0793d6760d28"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.050741 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" event={"ID":"40d00a75-5b72-4341-8618-1abb614b53cb","Type":"ContainerStarted","Data":"2b7189c49995956dd90418447f2f91b37243df2a6e76823b64e901095bf5eec8"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.053099 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2","Type":"ContainerStarted","Data":"128d1d24b1ae14e5677d960a6e1162162b6665780cf29525d25492b895d0db5c"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.058930 4919 generic.go:334] "Generic (PLEG): container finished" podID="1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" containerID="91cfe0a96f58faf6a3783b33833a94f11358f79b3cebcef6276c3c8e7f721b6d" exitCode=0 Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.059245 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg" event={"ID":"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8","Type":"ContainerDied","Data":"91cfe0a96f58faf6a3783b33833a94f11358f79b3cebcef6276c3c8e7f721b6d"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.059292 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg" event={"ID":"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8","Type":"ContainerStarted","Data":"53cc86f94376d9debffd2b0ffc0415a0a2641d05a86b116ad23124faa8f6520d"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.061653 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx" event={"ID":"8418e0c4-c86d-45ed-bef9-550c30e7f796","Type":"ContainerDied","Data":"cbf0607f9c818cad8f2f4b29b721d5d759fa7cd3e04ef465fe36d21b67dff16e"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.061706 4919 scope.go:117] "RemoveContainer" containerID="46a709b08471cfc922143a45f6e864aec9139712a21e97b8a9fd1b3662d34f7f" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.061874 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ff5475cc9-g72gx" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.092530 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-vnrf4" event={"ID":"7db423ab-427e-425f-a5d4-10ec71302c12","Type":"ContainerStarted","Data":"e5eb01d85eadd08b4bb52e97a2e3657b9c60644b56aa5a5b54be103d2ce0e325"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.119049 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qd4pk" event={"ID":"483b2110-904f-42b0-a634-81b7ee4f6642","Type":"ContainerStarted","Data":"b14153c70f19e91da910b2b172f1f2b0ce92112a9f3379d578588ec39d00db9d"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.119093 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qd4pk" event={"ID":"483b2110-904f-42b0-a634-81b7ee4f6642","Type":"ContainerStarted","Data":"5265a77faa52764721c63defefbb8822b83b73c31b11008824dd4b4ee8b576a0"} Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.265733 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g72gx"] Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.272118 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ff5475cc9-g72gx"] Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.293408 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-qd4pk" podStartSLOduration=2.293389329 podStartE2EDuration="2.293389329s" podCreationTimestamp="2025-09-30 20:31:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:32.236688153 +0000 UTC m=+1077.352721270" watchObservedRunningTime="2025-09-30 20:31:32.293389329 +0000 UTC m=+1077.409422456" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.667269 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.766588 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8c41-account-create-crpcb"] Sep 30 20:31:32 crc kubenswrapper[4919]: E0930 20:31:32.767347 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" containerName="init" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.767370 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" containerName="init" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.767563 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" containerName="init" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.770270 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8c41-account-create-crpcb" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.772466 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.807734 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8c41-account-create-crpcb"] Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.818776 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-sb\") pod \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.818824 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-swift-storage-0\") pod \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.818871 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-nb\") pod \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.818935 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qltj2\" (UniqueName: \"kubernetes.io/projected/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-kube-api-access-qltj2\") pod \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.819021 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-config\") pod \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.819147 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-svc\") pod \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\" (UID: \"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8\") " Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.850803 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-kube-api-access-qltj2" (OuterVolumeSpecName: "kube-api-access-qltj2") pod "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" (UID: "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8"). InnerVolumeSpecName "kube-api-access-qltj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.855300 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" (UID: "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.877127 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-config" (OuterVolumeSpecName: "config") pod "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" (UID: "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.880653 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.894427 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" (UID: "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.904559 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" (UID: "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.905967 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" (UID: "1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:32 crc kubenswrapper[4919]: W0930 20:31:32.906697 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42ca60af_7b90_492d_9f42_ba79af142539.slice/crio-14edeb59b396b5df7d35b063f127a8b4450ec006e2ac55f19c5ca3864cda972f WatchSource:0}: Error finding container 14edeb59b396b5df7d35b063f127a8b4450ec006e2ac55f19c5ca3864cda972f: Status 404 returned error can't find the container with id 14edeb59b396b5df7d35b063f127a8b4450ec006e2ac55f19c5ca3864cda972f Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.927007 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mwbl\" (UniqueName: \"kubernetes.io/projected/aa111051-1cd6-4015-b901-d7247d9a6128-kube-api-access-2mwbl\") pod \"barbican-8c41-account-create-crpcb\" (UID: \"aa111051-1cd6-4015-b901-d7247d9a6128\") " pod="openstack/barbican-8c41-account-create-crpcb" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.927069 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.927081 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.927089 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.927098 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.927160 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qltj2\" (UniqueName: \"kubernetes.io/projected/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-kube-api-access-qltj2\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.927189 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.978706 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-0dfd-account-create-cknrq"] Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.981546 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-0dfd-account-create-cknrq" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.983750 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 30 20:31:32 crc kubenswrapper[4919]: I0930 20:31:32.989735 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0dfd-account-create-cknrq"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.028887 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mwbl\" (UniqueName: \"kubernetes.io/projected/aa111051-1cd6-4015-b901-d7247d9a6128-kube-api-access-2mwbl\") pod \"barbican-8c41-account-create-crpcb\" (UID: \"aa111051-1cd6-4015-b901-d7247d9a6128\") " pod="openstack/barbican-8c41-account-create-crpcb" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.046945 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mwbl\" (UniqueName: \"kubernetes.io/projected/aa111051-1cd6-4015-b901-d7247d9a6128-kube-api-access-2mwbl\") pod \"barbican-8c41-account-create-crpcb\" (UID: \"aa111051-1cd6-4015-b901-d7247d9a6128\") " pod="openstack/barbican-8c41-account-create-crpcb" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.100483 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8c41-account-create-crpcb" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.131643 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57pzf\" (UniqueName: \"kubernetes.io/projected/315fa417-7e9e-4c30-ac6e-3dd472837602-kube-api-access-57pzf\") pod \"cinder-0dfd-account-create-cknrq\" (UID: \"315fa417-7e9e-4c30-ac6e-3dd472837602\") " pod="openstack/cinder-0dfd-account-create-cknrq" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.142833 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"42ca60af-7b90-492d-9f42-ba79af142539","Type":"ContainerStarted","Data":"14edeb59b396b5df7d35b063f127a8b4450ec006e2ac55f19c5ca3864cda972f"} Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.151440 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg" event={"ID":"1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8","Type":"ContainerDied","Data":"53cc86f94376d9debffd2b0ffc0415a0a2641d05a86b116ad23124faa8f6520d"} Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.151503 4919 scope.go:117] "RemoveContainer" containerID="91cfe0a96f58faf6a3783b33833a94f11358f79b3cebcef6276c3c8e7f721b6d" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.151635 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.188996 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7448-account-create-sz726"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.191855 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7448-account-create-sz726" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.192487 4919 generic.go:334] "Generic (PLEG): container finished" podID="40d00a75-5b72-4341-8618-1abb614b53cb" containerID="eb5b2c5fab103429a461f49d6ddd266b9242079e49e40d5c5e7e0793d6760d28" exitCode=0 Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.193333 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" event={"ID":"40d00a75-5b72-4341-8618-1abb614b53cb","Type":"ContainerDied","Data":"eb5b2c5fab103429a461f49d6ddd266b9242079e49e40d5c5e7e0793d6760d28"} Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.198625 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.210048 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7448-account-create-sz726"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.234588 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57pzf\" (UniqueName: \"kubernetes.io/projected/315fa417-7e9e-4c30-ac6e-3dd472837602-kube-api-access-57pzf\") pod \"cinder-0dfd-account-create-cknrq\" (UID: \"315fa417-7e9e-4c30-ac6e-3dd472837602\") " pod="openstack/cinder-0dfd-account-create-cknrq" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.236828 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.244253 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c5cc7c5ff-hkmpg"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.255981 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57pzf\" (UniqueName: \"kubernetes.io/projected/315fa417-7e9e-4c30-ac6e-3dd472837602-kube-api-access-57pzf\") pod \"cinder-0dfd-account-create-cknrq\" (UID: \"315fa417-7e9e-4c30-ac6e-3dd472837602\") " pod="openstack/cinder-0dfd-account-create-cknrq" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.306822 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-0dfd-account-create-cknrq" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.336405 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ftr6\" (UniqueName: \"kubernetes.io/projected/ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0-kube-api-access-6ftr6\") pod \"neutron-7448-account-create-sz726\" (UID: \"ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0\") " pod="openstack/neutron-7448-account-create-sz726" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.397890 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8c41-account-create-crpcb"] Sep 30 20:31:33 crc kubenswrapper[4919]: W0930 20:31:33.400247 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa111051_1cd6_4015_b901_d7247d9a6128.slice/crio-fd2a63df1f3e69db941bd3649c37a5e05e3b927bed541a9b371f57b031de6474 WatchSource:0}: Error finding container fd2a63df1f3e69db941bd3649c37a5e05e3b927bed541a9b371f57b031de6474: Status 404 returned error can't find the container with id fd2a63df1f3e69db941bd3649c37a5e05e3b927bed541a9b371f57b031de6474 Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.440029 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ftr6\" (UniqueName: \"kubernetes.io/projected/ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0-kube-api-access-6ftr6\") pod \"neutron-7448-account-create-sz726\" (UID: \"ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0\") " pod="openstack/neutron-7448-account-create-sz726" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.460093 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ftr6\" (UniqueName: \"kubernetes.io/projected/ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0-kube-api-access-6ftr6\") pod \"neutron-7448-account-create-sz726\" (UID: \"ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0\") " pod="openstack/neutron-7448-account-create-sz726" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.525196 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7448-account-create-sz726" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.658824 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8" path="/var/lib/kubelet/pods/1bfc8d1a-ee5c-43c0-810e-ccc162ebc7b8/volumes" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.660492 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8418e0c4-c86d-45ed-bef9-550c30e7f796" path="/var/lib/kubelet/pods/8418e0c4-c86d-45ed-bef9-550c30e7f796/volumes" Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.732857 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-0dfd-account-create-cknrq"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.826121 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.902118 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.946492 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:31:33 crc kubenswrapper[4919]: I0930 20:31:33.953055 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:34 crc kubenswrapper[4919]: I0930 20:31:34.107750 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7448-account-create-sz726"] Sep 30 20:31:34 crc kubenswrapper[4919]: W0930 20:31:34.114020 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce5e1f4a_1a65_4c03_bf3c_1f669b4731b0.slice/crio-88ba2bc298ccf04f0c7fff2aadcb2ddeb54a6516902a229308c86b608e4420c4 WatchSource:0}: Error finding container 88ba2bc298ccf04f0c7fff2aadcb2ddeb54a6516902a229308c86b608e4420c4: Status 404 returned error can't find the container with id 88ba2bc298ccf04f0c7fff2aadcb2ddeb54a6516902a229308c86b608e4420c4 Sep 30 20:31:34 crc kubenswrapper[4919]: I0930 20:31:34.202528 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0dfd-account-create-cknrq" event={"ID":"315fa417-7e9e-4c30-ac6e-3dd472837602","Type":"ContainerStarted","Data":"9f7f7b4e167e4ffc56a05b97e071caf3d170bdc691e341a6d2069c0b18dbc445"} Sep 30 20:31:34 crc kubenswrapper[4919]: I0930 20:31:34.203686 4919 generic.go:334] "Generic (PLEG): container finished" podID="aa111051-1cd6-4015-b901-d7247d9a6128" containerID="ebb9272c1b0f8828ce5ba0881c736cc29f60d950f43cdc9c0816043deea425fa" exitCode=0 Sep 30 20:31:34 crc kubenswrapper[4919]: I0930 20:31:34.203743 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8c41-account-create-crpcb" event={"ID":"aa111051-1cd6-4015-b901-d7247d9a6128","Type":"ContainerDied","Data":"ebb9272c1b0f8828ce5ba0881c736cc29f60d950f43cdc9c0816043deea425fa"} Sep 30 20:31:34 crc kubenswrapper[4919]: I0930 20:31:34.203760 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8c41-account-create-crpcb" event={"ID":"aa111051-1cd6-4015-b901-d7247d9a6128","Type":"ContainerStarted","Data":"fd2a63df1f3e69db941bd3649c37a5e05e3b927bed541a9b371f57b031de6474"} Sep 30 20:31:34 crc kubenswrapper[4919]: I0930 20:31:34.208767 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" 
event={"ID":"40d00a75-5b72-4341-8618-1abb614b53cb","Type":"ContainerStarted","Data":"3721b47eb484f189890c244c55ec3bf9ec1a039895753039a04e59791b73bf27"} Sep 30 20:31:34 crc kubenswrapper[4919]: I0930 20:31:34.210238 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7448-account-create-sz726" event={"ID":"ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0","Type":"ContainerStarted","Data":"88ba2bc298ccf04f0c7fff2aadcb2ddeb54a6516902a229308c86b608e4420c4"} Sep 30 20:31:34 crc kubenswrapper[4919]: I0930 20:31:34.211565 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f3ff6e5-7345-4844-89cd-460533571f13","Type":"ContainerStarted","Data":"f323eff56f71b8041be4ac09e16aba27b3a1d7e55b14f1b620252be8dcb77a3b"} Sep 30 20:31:35 crc kubenswrapper[4919]: I0930 20:31:35.671025 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8c41-account-create-crpcb" Sep 30 20:31:35 crc kubenswrapper[4919]: I0930 20:31:35.789018 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mwbl\" (UniqueName: \"kubernetes.io/projected/aa111051-1cd6-4015-b901-d7247d9a6128-kube-api-access-2mwbl\") pod \"aa111051-1cd6-4015-b901-d7247d9a6128\" (UID: \"aa111051-1cd6-4015-b901-d7247d9a6128\") " Sep 30 20:31:35 crc kubenswrapper[4919]: I0930 20:31:35.797187 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa111051-1cd6-4015-b901-d7247d9a6128-kube-api-access-2mwbl" (OuterVolumeSpecName: "kube-api-access-2mwbl") pod "aa111051-1cd6-4015-b901-d7247d9a6128" (UID: "aa111051-1cd6-4015-b901-d7247d9a6128"). InnerVolumeSpecName "kube-api-access-2mwbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:35 crc kubenswrapper[4919]: I0930 20:31:35.891501 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mwbl\" (UniqueName: \"kubernetes.io/projected/aa111051-1cd6-4015-b901-d7247d9a6128-kube-api-access-2mwbl\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:36 crc kubenswrapper[4919]: I0930 20:31:36.231040 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"42ca60af-7b90-492d-9f42-ba79af142539","Type":"ContainerStarted","Data":"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd"} Sep 30 20:31:36 crc kubenswrapper[4919]: I0930 20:31:36.233189 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8c41-account-create-crpcb" event={"ID":"aa111051-1cd6-4015-b901-d7247d9a6128","Type":"ContainerDied","Data":"fd2a63df1f3e69db941bd3649c37a5e05e3b927bed541a9b371f57b031de6474"} Sep 30 20:31:36 crc kubenswrapper[4919]: I0930 20:31:36.233254 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd2a63df1f3e69db941bd3649c37a5e05e3b927bed541a9b371f57b031de6474" Sep 30 20:31:36 crc kubenswrapper[4919]: I0930 20:31:36.233311 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8c41-account-create-crpcb" Sep 30 20:31:37 crc kubenswrapper[4919]: I0930 20:31:37.251138 4919 generic.go:334] "Generic (PLEG): container finished" podID="315fa417-7e9e-4c30-ac6e-3dd472837602" containerID="12a009d45ec0aac6c0db0e01f5c6af7d2ad76cf868bf9c3a91ca2368f49af468" exitCode=0 Sep 30 20:31:37 crc kubenswrapper[4919]: I0930 20:31:37.251409 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0dfd-account-create-cknrq" event={"ID":"315fa417-7e9e-4c30-ac6e-3dd472837602","Type":"ContainerDied","Data":"12a009d45ec0aac6c0db0e01f5c6af7d2ad76cf868bf9c3a91ca2368f49af468"} Sep 30 20:31:37 crc kubenswrapper[4919]: I0930 20:31:37.255725 4919 generic.go:334] "Generic (PLEG): container finished" podID="ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0" containerID="a93c041930d3298e860bfe7641781371461cc363eb119d276618c926fa30c0ca" exitCode=0 Sep 30 20:31:37 crc kubenswrapper[4919]: I0930 20:31:37.255767 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7448-account-create-sz726" event={"ID":"ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0","Type":"ContainerDied","Data":"a93c041930d3298e860bfe7641781371461cc363eb119d276618c926fa30c0ca"} Sep 30 20:31:37 crc kubenswrapper[4919]: I0930 20:31:37.257563 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f3ff6e5-7345-4844-89cd-460533571f13","Type":"ContainerStarted","Data":"47dbeac33623f0ab547308a65d977164431e92a885fbbd058b191a63e4375763"} Sep 30 20:31:37 crc kubenswrapper[4919]: I0930 20:31:37.257683 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:37 crc kubenswrapper[4919]: I0930 20:31:37.293870 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" podStartSLOduration=7.293856463 podStartE2EDuration="7.293856463s" podCreationTimestamp="2025-09-30 20:31:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:37.289056455 +0000 UTC m=+1082.405089582" watchObservedRunningTime="2025-09-30 20:31:37.293856463 +0000 UTC m=+1082.409889590" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.127382 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-jphnh"] Sep 30 20:31:38 crc kubenswrapper[4919]: E0930 20:31:38.128109 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa111051-1cd6-4015-b901-d7247d9a6128" containerName="mariadb-account-create" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.128131 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa111051-1cd6-4015-b901-d7247d9a6128" containerName="mariadb-account-create" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.128381 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa111051-1cd6-4015-b901-d7247d9a6128" containerName="mariadb-account-create" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.129065 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.132421 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-95shp" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.132645 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.134326 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jphnh"] Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.232790 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdn58\" (UniqueName: \"kubernetes.io/projected/39cdbc20-9bb1-4527-8195-f2b885c676a4-kube-api-access-tdn58\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.232891 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-combined-ca-bundle\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.232918 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-db-sync-config-data\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.267317 4919 generic.go:334] "Generic (PLEG): container finished" podID="483b2110-904f-42b0-a634-81b7ee4f6642" containerID="b14153c70f19e91da910b2b172f1f2b0ce92112a9f3379d578588ec39d00db9d" exitCode=0 Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.267369 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qd4pk" event={"ID":"483b2110-904f-42b0-a634-81b7ee4f6642","Type":"ContainerDied","Data":"b14153c70f19e91da910b2b172f1f2b0ce92112a9f3379d578588ec39d00db9d"} Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.271224 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.334527 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdn58\" (UniqueName: \"kubernetes.io/projected/39cdbc20-9bb1-4527-8195-f2b885c676a4-kube-api-access-tdn58\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.334643 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-combined-ca-bundle\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.334691 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-db-sync-config-data\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.347356 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-n8vc2"] Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.347678 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" podUID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerName="dnsmasq-dns" containerID="cri-o://c3a43acba6cf4087bbf1acca2e78acf679ce1d36e777c315d95b371efc1b47e9" gracePeriod=10 Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.357580 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-db-sync-config-data\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.357848 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-combined-ca-bundle\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.362487 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdn58\" (UniqueName: \"kubernetes.io/projected/39cdbc20-9bb1-4527-8195-f2b885c676a4-kube-api-access-tdn58\") pod \"barbican-db-sync-jphnh\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:38 crc kubenswrapper[4919]: I0930 20:31:38.444879 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:39 crc kubenswrapper[4919]: I0930 20:31:39.276858 4919 generic.go:334] "Generic (PLEG): container finished" podID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerID="c3a43acba6cf4087bbf1acca2e78acf679ce1d36e777c315d95b371efc1b47e9" exitCode=0 Sep 30 20:31:39 crc kubenswrapper[4919]: I0930 20:31:39.276930 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" event={"ID":"76fffd4e-e9d7-4dad-8b29-58d71bc2215e","Type":"ContainerDied","Data":"c3a43acba6cf4087bbf1acca2e78acf679ce1d36e777c315d95b371efc1b47e9"} Sep 30 20:31:39 crc kubenswrapper[4919]: I0930 20:31:39.406638 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" podUID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.135:5353: connect: connection refused" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.721938 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0dfd-account-create-cknrq" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.724892 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-qd4pk" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.730571 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7448-account-create-sz726" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.781024 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ftr6\" (UniqueName: \"kubernetes.io/projected/ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0-kube-api-access-6ftr6\") pod \"ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0\" (UID: \"ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.781128 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-combined-ca-bundle\") pod \"483b2110-904f-42b0-a634-81b7ee4f6642\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.781201 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-scripts\") pod \"483b2110-904f-42b0-a634-81b7ee4f6642\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.781239 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57pzf\" (UniqueName: \"kubernetes.io/projected/315fa417-7e9e-4c30-ac6e-3dd472837602-kube-api-access-57pzf\") pod \"315fa417-7e9e-4c30-ac6e-3dd472837602\" (UID: \"315fa417-7e9e-4c30-ac6e-3dd472837602\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.781306 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-credential-keys\") pod \"483b2110-904f-42b0-a634-81b7ee4f6642\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.781360 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-config-data\") pod \"483b2110-904f-42b0-a634-81b7ee4f6642\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.781413 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-fernet-keys\") pod \"483b2110-904f-42b0-a634-81b7ee4f6642\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.781470 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nlz4\" (UniqueName: \"kubernetes.io/projected/483b2110-904f-42b0-a634-81b7ee4f6642-kube-api-access-6nlz4\") pod \"483b2110-904f-42b0-a634-81b7ee4f6642\" (UID: \"483b2110-904f-42b0-a634-81b7ee4f6642\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.787775 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-scripts" (OuterVolumeSpecName: "scripts") pod "483b2110-904f-42b0-a634-81b7ee4f6642" (UID: "483b2110-904f-42b0-a634-81b7ee4f6642"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.787833 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0-kube-api-access-6ftr6" (OuterVolumeSpecName: "kube-api-access-6ftr6") pod "ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0" (UID: "ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0"). InnerVolumeSpecName "kube-api-access-6ftr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.788588 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/315fa417-7e9e-4c30-ac6e-3dd472837602-kube-api-access-57pzf" (OuterVolumeSpecName: "kube-api-access-57pzf") pod "315fa417-7e9e-4c30-ac6e-3dd472837602" (UID: "315fa417-7e9e-4c30-ac6e-3dd472837602"). InnerVolumeSpecName "kube-api-access-57pzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.791073 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "483b2110-904f-42b0-a634-81b7ee4f6642" (UID: "483b2110-904f-42b0-a634-81b7ee4f6642"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.813931 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "483b2110-904f-42b0-a634-81b7ee4f6642" (UID: "483b2110-904f-42b0-a634-81b7ee4f6642"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.813980 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/483b2110-904f-42b0-a634-81b7ee4f6642-kube-api-access-6nlz4" (OuterVolumeSpecName: "kube-api-access-6nlz4") pod "483b2110-904f-42b0-a634-81b7ee4f6642" (UID: "483b2110-904f-42b0-a634-81b7ee4f6642"). InnerVolumeSpecName "kube-api-access-6nlz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.847401 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "483b2110-904f-42b0-a634-81b7ee4f6642" (UID: "483b2110-904f-42b0-a634-81b7ee4f6642"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.854393 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-config-data" (OuterVolumeSpecName: "config-data") pod "483b2110-904f-42b0-a634-81b7ee4f6642" (UID: "483b2110-904f-42b0-a634-81b7ee4f6642"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.883067 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.883102 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57pzf\" (UniqueName: \"kubernetes.io/projected/315fa417-7e9e-4c30-ac6e-3dd472837602-kube-api-access-57pzf\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.883115 4919 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.883126 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.883136 4919 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.883145 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nlz4\" (UniqueName: \"kubernetes.io/projected/483b2110-904f-42b0-a634-81b7ee4f6642-kube-api-access-6nlz4\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.883154 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ftr6\" (UniqueName: \"kubernetes.io/projected/ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0-kube-api-access-6ftr6\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.883162 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483b2110-904f-42b0-a634-81b7ee4f6642-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.908955 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.984049 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-sb\") pod \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.984587 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-swift-storage-0\") pod \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.984613 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-nb\") pod \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.984650 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-config\") pod \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.984710 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-svc\") pod \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.984763 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrjnv\" (UniqueName: \"kubernetes.io/projected/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-kube-api-access-wrjnv\") pod \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\" (UID: \"76fffd4e-e9d7-4dad-8b29-58d71bc2215e\") " Sep 30 20:31:40 crc kubenswrapper[4919]: I0930 20:31:40.988648 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-kube-api-access-wrjnv" (OuterVolumeSpecName: "kube-api-access-wrjnv") pod "76fffd4e-e9d7-4dad-8b29-58d71bc2215e" (UID: "76fffd4e-e9d7-4dad-8b29-58d71bc2215e"). InnerVolumeSpecName "kube-api-access-wrjnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.029917 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76fffd4e-e9d7-4dad-8b29-58d71bc2215e" (UID: "76fffd4e-e9d7-4dad-8b29-58d71bc2215e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.047316 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76fffd4e-e9d7-4dad-8b29-58d71bc2215e" (UID: "76fffd4e-e9d7-4dad-8b29-58d71bc2215e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.048103 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76fffd4e-e9d7-4dad-8b29-58d71bc2215e" (UID: "76fffd4e-e9d7-4dad-8b29-58d71bc2215e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.048507 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-config" (OuterVolumeSpecName: "config") pod "76fffd4e-e9d7-4dad-8b29-58d71bc2215e" (UID: "76fffd4e-e9d7-4dad-8b29-58d71bc2215e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.057759 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-jphnh"] Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.075694 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "76fffd4e-e9d7-4dad-8b29-58d71bc2215e" (UID: "76fffd4e-e9d7-4dad-8b29-58d71bc2215e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.086317 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.086352 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.086363 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.086372 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.086382 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.086391 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrjnv\" (UniqueName: \"kubernetes.io/projected/76fffd4e-e9d7-4dad-8b29-58d71bc2215e-kube-api-access-wrjnv\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.303712 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"42ca60af-7b90-492d-9f42-ba79af142539","Type":"ContainerStarted","Data":"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.303809 4919 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="42ca60af-7b90-492d-9f42-ba79af142539" containerName="glance-log" containerID="cri-o://b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd" gracePeriod=30 Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.303914 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="42ca60af-7b90-492d-9f42-ba79af142539" containerName="glance-httpd" containerID="cri-o://55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937" gracePeriod=30 Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.318785 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-0dfd-account-create-cknrq" event={"ID":"315fa417-7e9e-4c30-ac6e-3dd472837602","Type":"ContainerDied","Data":"9f7f7b4e167e4ffc56a05b97e071caf3d170bdc691e341a6d2069c0b18dbc445"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.318832 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f7f7b4e167e4ffc56a05b97e071caf3d170bdc691e341a6d2069c0b18dbc445" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.318797 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-0dfd-account-create-cknrq" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.326884 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-vnrf4" event={"ID":"7db423ab-427e-425f-a5d4-10ec71302c12","Type":"ContainerStarted","Data":"86c4b320d58bfc6e95fb173638d692872c481fabdc18fde363cd7e6907de54f9"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.334886 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-qd4pk" event={"ID":"483b2110-904f-42b0-a634-81b7ee4f6642","Type":"ContainerDied","Data":"5265a77faa52764721c63defefbb8822b83b73c31b11008824dd4b4ee8b576a0"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.334926 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5265a77faa52764721c63defefbb8822b83b73c31b11008824dd4b4ee8b576a0" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.334994 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-qd4pk" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.339085 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=11.339064633 podStartE2EDuration="11.339064633s" podCreationTimestamp="2025-09-30 20:31:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:41.324712362 +0000 UTC m=+1086.440745499" watchObservedRunningTime="2025-09-30 20:31:41.339064633 +0000 UTC m=+1086.455097760" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.347091 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f3ff6e5-7345-4844-89cd-460533571f13","Type":"ContainerStarted","Data":"5831ec7e4000643d55880a8d9dbeb3a953b28a383b8faff65eb1aa17dbd5d9b4"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.347250 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" containerName="glance-log" containerID="cri-o://47dbeac33623f0ab547308a65d977164431e92a885fbbd058b191a63e4375763" gracePeriod=30 Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.347328 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" containerName="glance-httpd" containerID="cri-o://5831ec7e4000643d55880a8d9dbeb3a953b28a383b8faff65eb1aa17dbd5d9b4" gracePeriod=30 Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.359259 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.359335 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77585f5f8c-n8vc2" event={"ID":"76fffd4e-e9d7-4dad-8b29-58d71bc2215e","Type":"ContainerDied","Data":"2227da2729d3c56dd1349fa4422ca0ffcb44bca3043de5444d4046ccf32af930"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.359406 4919 scope.go:117] "RemoveContainer" containerID="c3a43acba6cf4087bbf1acca2e78acf679ce1d36e777c315d95b371efc1b47e9" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.360914 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-vnrf4" podStartSLOduration=2.615225918 podStartE2EDuration="11.360903409s" podCreationTimestamp="2025-09-30 20:31:30 +0000 UTC" firstStartedPulling="2025-09-30 20:31:31.797472848 +0000 UTC m=+1076.913505975" lastFinishedPulling="2025-09-30 20:31:40.543150329 +0000 UTC m=+1085.659183466" observedRunningTime="2025-09-30 20:31:41.34731393 +0000 UTC m=+1086.463347057" watchObservedRunningTime="2025-09-30 20:31:41.360903409 +0000 UTC m=+1086.476936526" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.381115 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=11.381096349 podStartE2EDuration="11.381096349s" podCreationTimestamp="2025-09-30 20:31:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:41.364592045 +0000 UTC m=+1086.480625172" watchObservedRunningTime="2025-09-30 20:31:41.381096349 +0000 UTC m=+1086.497129476" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.381959 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2","Type":"ContainerStarted","Data":"96cfa8db319d6c6d8ddfed9bf197f2b68ab300628e5f01c787b4e81b2d06b5cb"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.383816 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jphnh" event={"ID":"39cdbc20-9bb1-4527-8195-f2b885c676a4","Type":"ContainerStarted","Data":"2a65801e9136c8819e83cca31871ddebcb21a7d823cd5dece9998704514d4bf7"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.386725 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7448-account-create-sz726" event={"ID":"ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0","Type":"ContainerDied","Data":"88ba2bc298ccf04f0c7fff2aadcb2ddeb54a6516902a229308c86b608e4420c4"} Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.386766 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88ba2bc298ccf04f0c7fff2aadcb2ddeb54a6516902a229308c86b608e4420c4" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.386849 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7448-account-create-sz726" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.587841 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-n8vc2"] Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.593630 4919 scope.go:117] "RemoveContainer" containerID="2653ce2cc5a72e4b96caa75f96969ea5326a5b48c523874688f9df7e466f3312" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.594679 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77585f5f8c-n8vc2"] Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.644259 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" path="/var/lib/kubelet/pods/76fffd4e-e9d7-4dad-8b29-58d71bc2215e/volumes" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.868713 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-qd4pk"] Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.882592 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-qd4pk"] Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.921166 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-gtwwt"] Sep 30 20:31:41 crc kubenswrapper[4919]: E0930 20:31:41.922180 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerName="init" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.922195 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerName="init" Sep 30 20:31:41 crc kubenswrapper[4919]: E0930 20:31:41.922259 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315fa417-7e9e-4c30-ac6e-3dd472837602" containerName="mariadb-account-create" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.922266 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="315fa417-7e9e-4c30-ac6e-3dd472837602" containerName="mariadb-account-create" Sep 30 20:31:41 crc kubenswrapper[4919]: E0930 20:31:41.922286 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerName="dnsmasq-dns" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.922295 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerName="dnsmasq-dns" Sep 30 20:31:41 crc kubenswrapper[4919]: E0930 20:31:41.922338 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="483b2110-904f-42b0-a634-81b7ee4f6642" containerName="keystone-bootstrap" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.922348 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="483b2110-904f-42b0-a634-81b7ee4f6642" containerName="keystone-bootstrap" Sep 30 20:31:41 crc kubenswrapper[4919]: E0930 20:31:41.922366 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0" containerName="mariadb-account-create" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.922374 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0" containerName="mariadb-account-create" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.922679 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="76fffd4e-e9d7-4dad-8b29-58d71bc2215e" containerName="dnsmasq-dns" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 
20:31:41.922717 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0" containerName="mariadb-account-create" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.922728 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="483b2110-904f-42b0-a634-81b7ee4f6642" containerName="keystone-bootstrap" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.922738 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="315fa417-7e9e-4c30-ac6e-3dd472837602" containerName="mariadb-account-create" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.923696 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.926283 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.926479 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-76kff" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.929972 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.938106 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gtwwt"] Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.938129 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 30 20:31:41 crc kubenswrapper[4919]: I0930 20:31:41.957520 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.009287 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-config-data\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.009330 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-credential-keys\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.009350 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-fernet-keys\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.009377 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-combined-ca-bundle\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.009745 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx2c5\" (UniqueName: 
\"kubernetes.io/projected/9d738f62-6454-4ed9-a506-a3ffda2df598-kube-api-access-qx2c5\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.009844 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-scripts\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.111897 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"42ca60af-7b90-492d-9f42-ba79af142539\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.111951 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-combined-ca-bundle\") pod \"42ca60af-7b90-492d-9f42-ba79af142539\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.112645 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-logs\") pod \"42ca60af-7b90-492d-9f42-ba79af142539\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.112802 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-logs" (OuterVolumeSpecName: "logs") pod "42ca60af-7b90-492d-9f42-ba79af142539" (UID: "42ca60af-7b90-492d-9f42-ba79af142539"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.112890 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-scripts\") pod \"42ca60af-7b90-492d-9f42-ba79af142539\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.112972 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-httpd-run\") pod \"42ca60af-7b90-492d-9f42-ba79af142539\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.113186 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-config-data\") pod \"42ca60af-7b90-492d-9f42-ba79af142539\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.113276 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x64fw\" (UniqueName: \"kubernetes.io/projected/42ca60af-7b90-492d-9f42-ba79af142539-kube-api-access-x64fw\") pod \"42ca60af-7b90-492d-9f42-ba79af142539\" (UID: \"42ca60af-7b90-492d-9f42-ba79af142539\") " Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.113502 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "42ca60af-7b90-492d-9f42-ba79af142539" (UID: "42ca60af-7b90-492d-9f42-ba79af142539"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.113706 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx2c5\" (UniqueName: \"kubernetes.io/projected/9d738f62-6454-4ed9-a506-a3ffda2df598-kube-api-access-qx2c5\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.113796 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-scripts\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.113887 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-config-data\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.113922 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-credential-keys\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.113960 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-fernet-keys\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.114011 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-combined-ca-bundle\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.114187 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.114229 4919 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/42ca60af-7b90-492d-9f42-ba79af142539-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.117076 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "42ca60af-7b90-492d-9f42-ba79af142539" (UID: "42ca60af-7b90-492d-9f42-ba79af142539"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.117240 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-scripts" (OuterVolumeSpecName: "scripts") pod "42ca60af-7b90-492d-9f42-ba79af142539" (UID: "42ca60af-7b90-492d-9f42-ba79af142539"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.118877 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-scripts\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.121398 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-credential-keys\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.125096 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-combined-ca-bundle\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.126961 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42ca60af-7b90-492d-9f42-ba79af142539-kube-api-access-x64fw" (OuterVolumeSpecName: "kube-api-access-x64fw") pod "42ca60af-7b90-492d-9f42-ba79af142539" (UID: "42ca60af-7b90-492d-9f42-ba79af142539"). InnerVolumeSpecName "kube-api-access-x64fw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.129062 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-fernet-keys\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.130930 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-config-data\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.133972 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx2c5\" (UniqueName: \"kubernetes.io/projected/9d738f62-6454-4ed9-a506-a3ffda2df598-kube-api-access-qx2c5\") pod \"keystone-bootstrap-gtwwt\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.159328 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "42ca60af-7b90-492d-9f42-ba79af142539" (UID: "42ca60af-7b90-492d-9f42-ba79af142539"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.159620 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-config-data" (OuterVolumeSpecName: "config-data") pod "42ca60af-7b90-492d-9f42-ba79af142539" (UID: "42ca60af-7b90-492d-9f42-ba79af142539"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.215833 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.215871 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.215887 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x64fw\" (UniqueName: \"kubernetes.io/projected/42ca60af-7b90-492d-9f42-ba79af142539-kube-api-access-x64fw\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.216259 4919 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.216394 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ca60af-7b90-492d-9f42-ba79af142539-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.237377 4919 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.267831 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.318465 4919 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.396207 4919 generic.go:334] "Generic (PLEG): container finished" podID="42ca60af-7b90-492d-9f42-ba79af142539" containerID="55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937" exitCode=143 Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.396531 4919 generic.go:334] "Generic (PLEG): container finished" podID="42ca60af-7b90-492d-9f42-ba79af142539" containerID="b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd" exitCode=143 Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.396244 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"42ca60af-7b90-492d-9f42-ba79af142539","Type":"ContainerDied","Data":"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937"} Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.396595 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"42ca60af-7b90-492d-9f42-ba79af142539","Type":"ContainerDied","Data":"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd"} Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.396611 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"42ca60af-7b90-492d-9f42-ba79af142539","Type":"ContainerDied","Data":"14edeb59b396b5df7d35b063f127a8b4450ec006e2ac55f19c5ca3864cda972f"} Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.396627 4919 scope.go:117] "RemoveContainer" containerID="55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.396323 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.401509 4919 generic.go:334] "Generic (PLEG): container finished" podID="0f3ff6e5-7345-4844-89cd-460533571f13" containerID="5831ec7e4000643d55880a8d9dbeb3a953b28a383b8faff65eb1aa17dbd5d9b4" exitCode=143 Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.401563 4919 generic.go:334] "Generic (PLEG): container finished" podID="0f3ff6e5-7345-4844-89cd-460533571f13" containerID="47dbeac33623f0ab547308a65d977164431e92a885fbbd058b191a63e4375763" exitCode=143 Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.401617 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f3ff6e5-7345-4844-89cd-460533571f13","Type":"ContainerDied","Data":"5831ec7e4000643d55880a8d9dbeb3a953b28a383b8faff65eb1aa17dbd5d9b4"} Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.401657 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f3ff6e5-7345-4844-89cd-460533571f13","Type":"ContainerDied","Data":"47dbeac33623f0ab547308a65d977164431e92a885fbbd058b191a63e4375763"} Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.443627 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.444513 4919 scope.go:117] "RemoveContainer" containerID="b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.451129 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.460344 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:42 crc kubenswrapper[4919]: E0930 20:31:42.460670 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42ca60af-7b90-492d-9f42-ba79af142539" containerName="glance-log" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.460686 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="42ca60af-7b90-492d-9f42-ba79af142539" containerName="glance-log" Sep 30 20:31:42 crc kubenswrapper[4919]: E0930 20:31:42.460713 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42ca60af-7b90-492d-9f42-ba79af142539" containerName="glance-httpd" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.460718 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="42ca60af-7b90-492d-9f42-ba79af142539" containerName="glance-httpd" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.460875 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="42ca60af-7b90-492d-9f42-ba79af142539" containerName="glance-httpd" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.460888 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="42ca60af-7b90-492d-9f42-ba79af142539" containerName="glance-log" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.461697 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.467007 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.467330 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.484083 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.522613 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-logs\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.522676 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.522721 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.522824 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-scripts\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.522868 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhq5v\" (UniqueName: \"kubernetes.io/projected/792b4f4f-31af-4f32-a7b9-2af615779e95-kube-api-access-zhq5v\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.522931 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-config-data\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.522956 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.522992 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.565791 4919 scope.go:117] "RemoveContainer" containerID="55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937" Sep 30 20:31:42 crc kubenswrapper[4919]: E0930 20:31:42.567071 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937\": container with ID starting with 55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937 not found: ID does not exist" containerID="55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.567109 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937"} err="failed to get container status \"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937\": rpc error: code = NotFound desc = could not find container \"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937\": container with ID starting with 55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937 not found: ID does not exist" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.567137 4919 scope.go:117] "RemoveContainer" containerID="b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd" Sep 30 20:31:42 crc kubenswrapper[4919]: E0930 20:31:42.567729 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd\": container with ID starting with b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd not found: ID does not exist" containerID="b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.567763 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd"} err="failed to get container status \"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd\": rpc error: code = NotFound desc = could not find container \"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd\": container with ID starting with b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd not found: ID does not exist" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.567781 4919 scope.go:117] "RemoveContainer" containerID="55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.570528 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937"} err="failed to get container status \"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937\": rpc error: code = NotFound desc = could not find container \"55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937\": container with ID starting with 55593ccc367715053bac0a995c5c3e9da8e023353f5981764314904851524937 not found: ID does not exist" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 
20:31:42.570576 4919 scope.go:117] "RemoveContainer" containerID="b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.571068 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd"} err="failed to get container status \"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd\": rpc error: code = NotFound desc = could not find container \"b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd\": container with ID starting with b970b51789532f47bd9d88d1aca4a0df78c8f87b53de56255871fdeca68725fd not found: ID does not exist" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.624758 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-logs\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.624810 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.624849 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.624902 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-scripts\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.624929 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhq5v\" (UniqueName: \"kubernetes.io/projected/792b4f4f-31af-4f32-a7b9-2af615779e95-kube-api-access-zhq5v\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.624962 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-config-data\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.624976 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.625000 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.625418 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.625615 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-logs\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.626688 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.634241 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.634569 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-config-data\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.647504 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.653407 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-scripts\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.654132 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhq5v\" (UniqueName: \"kubernetes.io/projected/792b4f4f-31af-4f32-a7b9-2af615779e95-kube-api-access-zhq5v\") pod \"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.655334 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod 
\"glance-default-external-api-0\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.807805 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-gtwwt"] Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.889230 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:31:42 crc kubenswrapper[4919]: I0930 20:31:42.916629 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.032778 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-httpd-run\") pod \"0f3ff6e5-7345-4844-89cd-460533571f13\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.032816 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-logs\") pod \"0f3ff6e5-7345-4844-89cd-460533571f13\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.032870 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-scripts\") pod \"0f3ff6e5-7345-4844-89cd-460533571f13\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.032925 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-config-data\") pod \"0f3ff6e5-7345-4844-89cd-460533571f13\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.032984 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jmlg\" (UniqueName: \"kubernetes.io/projected/0f3ff6e5-7345-4844-89cd-460533571f13-kube-api-access-9jmlg\") pod \"0f3ff6e5-7345-4844-89cd-460533571f13\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.033021 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"0f3ff6e5-7345-4844-89cd-460533571f13\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.033071 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-combined-ca-bundle\") pod \"0f3ff6e5-7345-4844-89cd-460533571f13\" (UID: \"0f3ff6e5-7345-4844-89cd-460533571f13\") " Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.033144 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0f3ff6e5-7345-4844-89cd-460533571f13" (UID: "0f3ff6e5-7345-4844-89cd-460533571f13"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.033425 4919 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.034537 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-logs" (OuterVolumeSpecName: "logs") pod "0f3ff6e5-7345-4844-89cd-460533571f13" (UID: "0f3ff6e5-7345-4844-89cd-460533571f13"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.038097 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-scripts" (OuterVolumeSpecName: "scripts") pod "0f3ff6e5-7345-4844-89cd-460533571f13" (UID: "0f3ff6e5-7345-4844-89cd-460533571f13"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.039161 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "0f3ff6e5-7345-4844-89cd-460533571f13" (UID: "0f3ff6e5-7345-4844-89cd-460533571f13"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.044443 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f3ff6e5-7345-4844-89cd-460533571f13-kube-api-access-9jmlg" (OuterVolumeSpecName: "kube-api-access-9jmlg") pod "0f3ff6e5-7345-4844-89cd-460533571f13" (UID: "0f3ff6e5-7345-4844-89cd-460533571f13"). InnerVolumeSpecName "kube-api-access-9jmlg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.100397 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f3ff6e5-7345-4844-89cd-460533571f13" (UID: "0f3ff6e5-7345-4844-89cd-460533571f13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.121863 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-config-data" (OuterVolumeSpecName: "config-data") pod "0f3ff6e5-7345-4844-89cd-460533571f13" (UID: "0f3ff6e5-7345-4844-89cd-460533571f13"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.135640 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f3ff6e5-7345-4844-89cd-460533571f13-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.135675 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.135690 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.135704 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jmlg\" (UniqueName: \"kubernetes.io/projected/0f3ff6e5-7345-4844-89cd-460533571f13-kube-api-access-9jmlg\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.135737 4919 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.135751 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f3ff6e5-7345-4844-89cd-460533571f13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.162189 4919 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.220482 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-sp45k"] Sep 30 20:31:43 crc kubenswrapper[4919]: E0930 20:31:43.220847 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" containerName="glance-log" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.220864 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" containerName="glance-log" Sep 30 20:31:43 crc kubenswrapper[4919]: E0930 20:31:43.220873 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" containerName="glance-httpd" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.220879 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" containerName="glance-httpd" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.221059 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" containerName="glance-log" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.221084 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" containerName="glance-httpd" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.221774 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.224269 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.224434 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.224630 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zdqcx" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.230360 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sp45k"] Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.237389 4919 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.338451 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-etc-machine-id\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.338522 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-config-data\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.338673 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wlgl\" (UniqueName: \"kubernetes.io/projected/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-kube-api-access-8wlgl\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.338719 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-scripts\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.338760 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-combined-ca-bundle\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.338945 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-db-sync-config-data\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.415988 4919 generic.go:334] "Generic (PLEG): container finished" podID="7db423ab-427e-425f-a5d4-10ec71302c12" containerID="86c4b320d58bfc6e95fb173638d692872c481fabdc18fde363cd7e6907de54f9" 
exitCode=0 Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.416088 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-vnrf4" event={"ID":"7db423ab-427e-425f-a5d4-10ec71302c12","Type":"ContainerDied","Data":"86c4b320d58bfc6e95fb173638d692872c481fabdc18fde363cd7e6907de54f9"} Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.420080 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gtwwt" event={"ID":"9d738f62-6454-4ed9-a506-a3ffda2df598","Type":"ContainerStarted","Data":"08dc050f2ff3494df08cbc51f4613bf1a4cc13347e0567fdc628b577360b1de1"} Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.420122 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gtwwt" event={"ID":"9d738f62-6454-4ed9-a506-a3ffda2df598","Type":"ContainerStarted","Data":"3a615974cfeb60cae537d38665703da2911ea5fac1d8d9181f4ea9143b35c928"} Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.423796 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f3ff6e5-7345-4844-89cd-460533571f13","Type":"ContainerDied","Data":"f323eff56f71b8041be4ac09e16aba27b3a1d7e55b14f1b620252be8dcb77a3b"} Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.423807 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.423844 4919 scope.go:117] "RemoveContainer" containerID="5831ec7e4000643d55880a8d9dbeb3a953b28a383b8faff65eb1aa17dbd5d9b4" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.429862 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:31:43 crc kubenswrapper[4919]: W0930 20:31:43.437437 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod792b4f4f_31af_4f32_a7b9_2af615779e95.slice/crio-42f6e79802e75751b94a5f806192e4f9c05f57d03326f34cbd894b0f592ae9a5 WatchSource:0}: Error finding container 42f6e79802e75751b94a5f806192e4f9c05f57d03326f34cbd894b0f592ae9a5: Status 404 returned error can't find the container with id 42f6e79802e75751b94a5f806192e4f9c05f57d03326f34cbd894b0f592ae9a5 Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.440205 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-db-sync-config-data\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.440286 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-etc-machine-id\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.440322 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-config-data\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.440359 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-8wlgl\" (UniqueName: \"kubernetes.io/projected/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-kube-api-access-8wlgl\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.440376 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-scripts\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.440397 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-combined-ca-bundle\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.441643 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-etc-machine-id\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.445257 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-config-data\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.447739 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2","Type":"ContainerStarted","Data":"f3c7d5ce118170fb3ca31dfd81f71a4772449a7523e4a659c40302d816a03232"} Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.452255 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-db-sync-config-data\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.456811 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-combined-ca-bundle\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.457795 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-gtwwt" podStartSLOduration=2.457771009 podStartE2EDuration="2.457771009s" podCreationTimestamp="2025-09-30 20:31:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:43.450746087 +0000 UTC m=+1088.566779224" watchObservedRunningTime="2025-09-30 20:31:43.457771009 +0000 UTC m=+1088.573804136" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.460599 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-scripts\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.469901 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wlgl\" (UniqueName: \"kubernetes.io/projected/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-kube-api-access-8wlgl\") pod \"cinder-db-sync-sp45k\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.473626 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.478861 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.500294 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.502094 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.505678 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.506154 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.512816 4919 scope.go:117] "RemoveContainer" containerID="47dbeac33623f0ab547308a65d977164431e92a885fbbd058b191a63e4375763" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.526357 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.547791 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sp45k" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.548615 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.549169 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.549331 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.549447 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.549832 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-logs\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.550059 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.550177 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjk29\" (UniqueName: \"kubernetes.io/projected/69e2e475-a270-4817-b14b-fbb6d78abfa3-kube-api-access-qjk29\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.550464 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.609061 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-pl4gj"] Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.613193 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.615128 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-qjgb7" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.615689 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.615887 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.617518 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pl4gj"] Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655263 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-logs\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655307 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655334 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjk29\" (UniqueName: \"kubernetes.io/projected/69e2e475-a270-4817-b14b-fbb6d78abfa3-kube-api-access-qjk29\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655365 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-combined-ca-bundle\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655384 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655416 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655441 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-config\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655677 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvs2v\" (UniqueName: \"kubernetes.io/projected/737cb8aa-63c3-4a59-893c-3d5075795304-kube-api-access-fvs2v\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655739 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655758 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.655778 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.656719 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-logs\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.656868 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.657058 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.667583 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.668503 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f3ff6e5-7345-4844-89cd-460533571f13" path="/var/lib/kubelet/pods/0f3ff6e5-7345-4844-89cd-460533571f13/volumes" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.674151 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42ca60af-7b90-492d-9f42-ba79af142539" path="/var/lib/kubelet/pods/42ca60af-7b90-492d-9f42-ba79af142539/volumes" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.674935 
4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="483b2110-904f-42b0-a634-81b7ee4f6642" path="/var/lib/kubelet/pods/483b2110-904f-42b0-a634-81b7ee4f6642/volumes" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.676531 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.682108 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjk29\" (UniqueName: \"kubernetes.io/projected/69e2e475-a270-4817-b14b-fbb6d78abfa3-kube-api-access-qjk29\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.682811 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.686497 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.719971 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.757512 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-combined-ca-bundle\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.757595 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-config\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.757643 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvs2v\" (UniqueName: \"kubernetes.io/projected/737cb8aa-63c3-4a59-893c-3d5075795304-kube-api-access-fvs2v\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.762108 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-combined-ca-bundle\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " 
pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.764924 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-config\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.772764 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvs2v\" (UniqueName: \"kubernetes.io/projected/737cb8aa-63c3-4a59-893c-3d5075795304-kube-api-access-fvs2v\") pod \"neutron-db-sync-pl4gj\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:43 crc kubenswrapper[4919]: I0930 20:31:43.866128 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.071412 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.181963 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sp45k"] Sep 30 20:31:44 crc kubenswrapper[4919]: W0930 20:31:44.190965 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d660eb4_7718_4b2b_a834_9b7d0d2b64a4.slice/crio-33302023c854ad5dafd4f296452531bf4c5aa8889c17e30eefc7eaedf8ff8af4 WatchSource:0}: Error finding container 33302023c854ad5dafd4f296452531bf4c5aa8889c17e30eefc7eaedf8ff8af4: Status 404 returned error can't find the container with id 33302023c854ad5dafd4f296452531bf4c5aa8889c17e30eefc7eaedf8ff8af4 Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.484379 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sp45k" event={"ID":"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4","Type":"ContainerStarted","Data":"33302023c854ad5dafd4f296452531bf4c5aa8889c17e30eefc7eaedf8ff8af4"} Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.490432 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b4f4f-31af-4f32-a7b9-2af615779e95","Type":"ContainerStarted","Data":"09366126cc9eafe4b8bf9f0d6429bfd6bfb77fbc8184b11ca9cd980b0f43692b"} Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.490851 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b4f4f-31af-4f32-a7b9-2af615779e95","Type":"ContainerStarted","Data":"42f6e79802e75751b94a5f806192e4f9c05f57d03326f34cbd894b0f592ae9a5"} Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.497645 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.550165 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-pl4gj"] Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.783871 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.888574 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-combined-ca-bundle\") pod \"7db423ab-427e-425f-a5d4-10ec71302c12\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.888689 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-scripts\") pod \"7db423ab-427e-425f-a5d4-10ec71302c12\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.888801 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7db423ab-427e-425f-a5d4-10ec71302c12-logs\") pod \"7db423ab-427e-425f-a5d4-10ec71302c12\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.888861 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vpqv\" (UniqueName: \"kubernetes.io/projected/7db423ab-427e-425f-a5d4-10ec71302c12-kube-api-access-9vpqv\") pod \"7db423ab-427e-425f-a5d4-10ec71302c12\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.888894 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-config-data\") pod \"7db423ab-427e-425f-a5d4-10ec71302c12\" (UID: \"7db423ab-427e-425f-a5d4-10ec71302c12\") " Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.890016 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7db423ab-427e-425f-a5d4-10ec71302c12-logs" (OuterVolumeSpecName: "logs") pod "7db423ab-427e-425f-a5d4-10ec71302c12" (UID: "7db423ab-427e-425f-a5d4-10ec71302c12"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.893864 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-scripts" (OuterVolumeSpecName: "scripts") pod "7db423ab-427e-425f-a5d4-10ec71302c12" (UID: "7db423ab-427e-425f-a5d4-10ec71302c12"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.899398 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7db423ab-427e-425f-a5d4-10ec71302c12-kube-api-access-9vpqv" (OuterVolumeSpecName: "kube-api-access-9vpqv") pod "7db423ab-427e-425f-a5d4-10ec71302c12" (UID: "7db423ab-427e-425f-a5d4-10ec71302c12"). InnerVolumeSpecName "kube-api-access-9vpqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.913611 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-config-data" (OuterVolumeSpecName: "config-data") pod "7db423ab-427e-425f-a5d4-10ec71302c12" (UID: "7db423ab-427e-425f-a5d4-10ec71302c12"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.934763 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7db423ab-427e-425f-a5d4-10ec71302c12" (UID: "7db423ab-427e-425f-a5d4-10ec71302c12"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.990745 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7db423ab-427e-425f-a5d4-10ec71302c12-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.990775 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vpqv\" (UniqueName: \"kubernetes.io/projected/7db423ab-427e-425f-a5d4-10ec71302c12-kube-api-access-9vpqv\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.990788 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.990797 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:44 crc kubenswrapper[4919]: I0930 20:31:44.990807 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db423ab-427e-425f-a5d4-10ec71302c12-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.511365 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b4f4f-31af-4f32-a7b9-2af615779e95","Type":"ContainerStarted","Data":"d525eed0dbc2683a937baa5a4d92c94dc0dafc867dbcd9bb401b42bc1d42ef09"} Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.524716 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69e2e475-a270-4817-b14b-fbb6d78abfa3","Type":"ContainerStarted","Data":"93c16f287f1951d83dcf001b24d242503e0bba26feb852174ac0f9f76c3dd18d"} Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.524760 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69e2e475-a270-4817-b14b-fbb6d78abfa3","Type":"ContainerStarted","Data":"fe08c8ac4b28d099463ea79311a6b2a10d4a883fde98fbb7cfb6223b4b40c495"} Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.525963 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pl4gj" event={"ID":"737cb8aa-63c3-4a59-893c-3d5075795304","Type":"ContainerStarted","Data":"f2e6cd8e280ead561c63fbecc47f9cf11fdc13114c08ceefc02b347301650935"} Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.526005 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pl4gj" event={"ID":"737cb8aa-63c3-4a59-893c-3d5075795304","Type":"ContainerStarted","Data":"3cc34d7cce8732d3c5d9d1823b06f85cfa23b88dd2fabcc6f04a63496e72e400"} Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.527064 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-vnrf4" 
event={"ID":"7db423ab-427e-425f-a5d4-10ec71302c12","Type":"ContainerDied","Data":"e5eb01d85eadd08b4bb52e97a2e3657b9c60644b56aa5a5b54be103d2ce0e325"} Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.527085 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5eb01d85eadd08b4bb52e97a2e3657b9c60644b56aa5a5b54be103d2ce0e325" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.527114 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-vnrf4" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.533730 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6f9cd6fc64-z8qnp"] Sep 30 20:31:45 crc kubenswrapper[4919]: E0930 20:31:45.534155 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db423ab-427e-425f-a5d4-10ec71302c12" containerName="placement-db-sync" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.534171 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db423ab-427e-425f-a5d4-10ec71302c12" containerName="placement-db-sync" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.534369 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db423ab-427e-425f-a5d4-10ec71302c12" containerName="placement-db-sync" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.535253 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.547000 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.547142 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.547142 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.547167 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.547231 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-pjdx6" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.549737 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6f9cd6fc64-z8qnp"] Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.551521 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.551503238 podStartE2EDuration="3.551503238s" podCreationTimestamp="2025-09-30 20:31:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:45.54597504 +0000 UTC m=+1090.662008187" watchObservedRunningTime="2025-09-30 20:31:45.551503238 +0000 UTC m=+1090.667536355" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.599027 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-public-tls-certs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.599061 4919 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-config-data\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.599081 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-internal-tls-certs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.599134 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-combined-ca-bundle\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.599156 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-logs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.599172 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-scripts\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.599204 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58bkt\" (UniqueName: \"kubernetes.io/projected/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-kube-api-access-58bkt\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.614257 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-pl4gj" podStartSLOduration=2.614230857 podStartE2EDuration="2.614230857s" podCreationTimestamp="2025-09-30 20:31:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:45.594534582 +0000 UTC m=+1090.710567709" watchObservedRunningTime="2025-09-30 20:31:45.614230857 +0000 UTC m=+1090.730263994" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.701200 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-public-tls-certs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.701252 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-config-data\") pod 
\"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.701281 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-internal-tls-certs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.701312 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-combined-ca-bundle\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.701337 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-logs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.701372 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-scripts\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.701438 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58bkt\" (UniqueName: \"kubernetes.io/projected/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-kube-api-access-58bkt\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.702286 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-logs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.706737 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-internal-tls-certs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.707062 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-combined-ca-bundle\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.707643 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-config-data\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc 
kubenswrapper[4919]: I0930 20:31:45.709147 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-scripts\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.718603 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-public-tls-certs\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.723746 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58bkt\" (UniqueName: \"kubernetes.io/projected/7e3e697a-4e47-48d5-a3f9-ae4d4a772f60-kube-api-access-58bkt\") pod \"placement-6f9cd6fc64-z8qnp\" (UID: \"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60\") " pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:45 crc kubenswrapper[4919]: I0930 20:31:45.893058 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:46 crc kubenswrapper[4919]: I0930 20:31:46.537118 4919 generic.go:334] "Generic (PLEG): container finished" podID="9d738f62-6454-4ed9-a506-a3ffda2df598" containerID="08dc050f2ff3494df08cbc51f4613bf1a4cc13347e0567fdc628b577360b1de1" exitCode=0 Sep 30 20:31:46 crc kubenswrapper[4919]: I0930 20:31:46.537173 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gtwwt" event={"ID":"9d738f62-6454-4ed9-a506-a3ffda2df598","Type":"ContainerDied","Data":"08dc050f2ff3494df08cbc51f4613bf1a4cc13347e0567fdc628b577360b1de1"} Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.482948 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.548748 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-credential-keys\") pod \"9d738f62-6454-4ed9-a506-a3ffda2df598\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.548849 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-config-data\") pod \"9d738f62-6454-4ed9-a506-a3ffda2df598\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.555460 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-gtwwt" event={"ID":"9d738f62-6454-4ed9-a506-a3ffda2df598","Type":"ContainerDied","Data":"3a615974cfeb60cae537d38665703da2911ea5fac1d8d9181f4ea9143b35c928"} Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.555508 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a615974cfeb60cae537d38665703da2911ea5fac1d8d9181f4ea9143b35c928" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.555580 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-gtwwt" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.556002 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9d738f62-6454-4ed9-a506-a3ffda2df598" (UID: "9d738f62-6454-4ed9-a506-a3ffda2df598"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.608477 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-config-data" (OuterVolumeSpecName: "config-data") pod "9d738f62-6454-4ed9-a506-a3ffda2df598" (UID: "9d738f62-6454-4ed9-a506-a3ffda2df598"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.650317 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx2c5\" (UniqueName: \"kubernetes.io/projected/9d738f62-6454-4ed9-a506-a3ffda2df598-kube-api-access-qx2c5\") pod \"9d738f62-6454-4ed9-a506-a3ffda2df598\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.650413 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-fernet-keys\") pod \"9d738f62-6454-4ed9-a506-a3ffda2df598\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.650448 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-scripts\") pod \"9d738f62-6454-4ed9-a506-a3ffda2df598\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.650471 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-combined-ca-bundle\") pod \"9d738f62-6454-4ed9-a506-a3ffda2df598\" (UID: \"9d738f62-6454-4ed9-a506-a3ffda2df598\") " Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.650957 4919 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.650983 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.658654 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d738f62-6454-4ed9-a506-a3ffda2df598-kube-api-access-qx2c5" (OuterVolumeSpecName: "kube-api-access-qx2c5") pod "9d738f62-6454-4ed9-a506-a3ffda2df598" (UID: "9d738f62-6454-4ed9-a506-a3ffda2df598"). InnerVolumeSpecName "kube-api-access-qx2c5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.659864 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-659c895849-vsrcz"] Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.659943 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9d738f62-6454-4ed9-a506-a3ffda2df598" (UID: "9d738f62-6454-4ed9-a506-a3ffda2df598"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:48 crc kubenswrapper[4919]: E0930 20:31:48.660407 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d738f62-6454-4ed9-a506-a3ffda2df598" containerName="keystone-bootstrap" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.660431 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d738f62-6454-4ed9-a506-a3ffda2df598" containerName="keystone-bootstrap" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.660649 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d738f62-6454-4ed9-a506-a3ffda2df598" containerName="keystone-bootstrap" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.661426 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-scripts" (OuterVolumeSpecName: "scripts") pod "9d738f62-6454-4ed9-a506-a3ffda2df598" (UID: "9d738f62-6454-4ed9-a506-a3ffda2df598"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.663725 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.672230 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-659c895849-vsrcz"] Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.674558 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.674638 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.710464 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d738f62-6454-4ed9-a506-a3ffda2df598" (UID: "9d738f62-6454-4ed9-a506-a3ffda2df598"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.753446 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx2c5\" (UniqueName: \"kubernetes.io/projected/9d738f62-6454-4ed9-a506-a3ffda2df598-kube-api-access-qx2c5\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.753483 4919 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.753497 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.753511 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d738f62-6454-4ed9-a506-a3ffda2df598-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.855449 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-credential-keys\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.855495 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-config-data\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.855510 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-public-tls-certs\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.855706 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2nq8\" (UniqueName: \"kubernetes.io/projected/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-kube-api-access-c2nq8\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.855825 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-fernet-keys\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.855895 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-combined-ca-bundle\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc 
kubenswrapper[4919]: I0930 20:31:48.855930 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-internal-tls-certs\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.855969 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-scripts\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.957967 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-fernet-keys\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.958076 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-combined-ca-bundle\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.958768 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-internal-tls-certs\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.958807 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-scripts\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.958918 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-credential-keys\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.958949 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-config-data\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.958970 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-public-tls-certs\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.958998 4919 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-c2nq8\" (UniqueName: \"kubernetes.io/projected/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-kube-api-access-c2nq8\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.962785 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-combined-ca-bundle\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.963179 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-fernet-keys\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.963607 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-config-data\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.964075 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-internal-tls-certs\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.964166 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-public-tls-certs\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.966463 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-credential-keys\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.972583 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-scripts\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:48 crc kubenswrapper[4919]: I0930 20:31:48.973914 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2nq8\" (UniqueName: \"kubernetes.io/projected/b8f18d42-d7ea-42d1-bbcb-a81afc1b0508-kube-api-access-c2nq8\") pod \"keystone-659c895849-vsrcz\" (UID: \"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508\") " pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:49 crc kubenswrapper[4919]: I0930 20:31:49.022784 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.404570 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6f9cd6fc64-z8qnp"] Sep 30 20:31:52 crc kubenswrapper[4919]: W0930 20:31:52.409063 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e3e697a_4e47_48d5_a3f9_ae4d4a772f60.slice/crio-8af16cf9496358f11fd5d229ccb4c36282495f0abcbc4068a9026212978d6890 WatchSource:0}: Error finding container 8af16cf9496358f11fd5d229ccb4c36282495f0abcbc4068a9026212978d6890: Status 404 returned error can't find the container with id 8af16cf9496358f11fd5d229ccb4c36282495f0abcbc4068a9026212978d6890 Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.443603 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-659c895849-vsrcz"] Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.594300 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2","Type":"ContainerStarted","Data":"d4b547265ce318392550a8af579d3b8716723076d7ad569522200356991a0dce"} Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.595743 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-659c895849-vsrcz" event={"ID":"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508","Type":"ContainerStarted","Data":"18ddade154108d2ef59b53b798d5653a74cb5b5acf1d083f3c4ecd8727519c1a"} Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.598284 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jphnh" event={"ID":"39cdbc20-9bb1-4527-8195-f2b885c676a4","Type":"ContainerStarted","Data":"8e4b6f20e5c822c9f587c4cac1efc4bb6ec2f33f860884d74dfb01efec1bfee7"} Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.603046 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f9cd6fc64-z8qnp" event={"ID":"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60","Type":"ContainerStarted","Data":"8af16cf9496358f11fd5d229ccb4c36282495f0abcbc4068a9026212978d6890"} Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.607918 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69e2e475-a270-4817-b14b-fbb6d78abfa3","Type":"ContainerStarted","Data":"cd695b0b60075119c80f53c247d13812b6754bdc0aba11733eae3260ba76972e"} Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.621303 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-jphnh" podStartSLOduration=3.772126451 podStartE2EDuration="14.621281911s" podCreationTimestamp="2025-09-30 20:31:38 +0000 UTC" firstStartedPulling="2025-09-30 20:31:41.067164196 +0000 UTC m=+1086.183197323" lastFinishedPulling="2025-09-30 20:31:51.916319656 +0000 UTC m=+1097.032352783" observedRunningTime="2025-09-30 20:31:52.620989523 +0000 UTC m=+1097.737022660" watchObservedRunningTime="2025-09-30 20:31:52.621281911 +0000 UTC m=+1097.737315048" Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.655661 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.655640236 podStartE2EDuration="9.655640236s" podCreationTimestamp="2025-09-30 20:31:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 
20:31:52.642061077 +0000 UTC m=+1097.758094194" watchObservedRunningTime="2025-09-30 20:31:52.655640236 +0000 UTC m=+1097.771673363" Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.889951 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.890309 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.920163 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 20:31:52 crc kubenswrapper[4919]: I0930 20:31:52.933821 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.620011 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f9cd6fc64-z8qnp" event={"ID":"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60","Type":"ContainerStarted","Data":"33f5e2d163c8a99bd3f4445a1676b34a9e6493072c94c31b69e3076f89223fba"} Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.620053 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6f9cd6fc64-z8qnp" event={"ID":"7e3e697a-4e47-48d5-a3f9-ae4d4a772f60","Type":"ContainerStarted","Data":"909631408f6b2f56694ffc78c2b921078768e4cada840c5b967a9c6021ff370e"} Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.620185 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.622339 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-659c895849-vsrcz" event={"ID":"b8f18d42-d7ea-42d1-bbcb-a81afc1b0508","Type":"ContainerStarted","Data":"62358ef7d5a90c01d842b821682fed5c32876079fe1775c97a2aa16e19e8f7f5"} Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.623540 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.623573 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.623679 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.698149 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6f9cd6fc64-z8qnp" podStartSLOduration=8.698130891 podStartE2EDuration="8.698130891s" podCreationTimestamp="2025-09-30 20:31:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:53.658887766 +0000 UTC m=+1098.774920903" watchObservedRunningTime="2025-09-30 20:31:53.698130891 +0000 UTC m=+1098.814164008" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.699439 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-659c895849-vsrcz" podStartSLOduration=5.699432938 podStartE2EDuration="5.699432938s" podCreationTimestamp="2025-09-30 20:31:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:31:53.689194295 +0000 UTC 
m=+1098.805227432" watchObservedRunningTime="2025-09-30 20:31:53.699432938 +0000 UTC m=+1098.815466065" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.866464 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.866506 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.936152 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:53 crc kubenswrapper[4919]: I0930 20:31:53.950902 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:54 crc kubenswrapper[4919]: I0930 20:31:54.631042 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:54 crc kubenswrapper[4919]: I0930 20:31:54.631107 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:54 crc kubenswrapper[4919]: I0930 20:31:54.631122 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:31:55 crc kubenswrapper[4919]: I0930 20:31:55.648682 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 20:31:55 crc kubenswrapper[4919]: I0930 20:31:55.648984 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 20:31:56 crc kubenswrapper[4919]: I0930 20:31:56.062669 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:31:56 crc kubenswrapper[4919]: I0930 20:31:56.062742 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:31:56 crc kubenswrapper[4919]: I0930 20:31:56.442897 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:56 crc kubenswrapper[4919]: I0930 20:31:56.649809 4919 generic.go:334] "Generic (PLEG): container finished" podID="39cdbc20-9bb1-4527-8195-f2b885c676a4" containerID="8e4b6f20e5c822c9f587c4cac1efc4bb6ec2f33f860884d74dfb01efec1bfee7" exitCode=0 Sep 30 20:31:56 crc kubenswrapper[4919]: I0930 20:31:56.649914 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jphnh" event={"ID":"39cdbc20-9bb1-4527-8195-f2b885c676a4","Type":"ContainerDied","Data":"8e4b6f20e5c822c9f587c4cac1efc4bb6ec2f33f860884d74dfb01efec1bfee7"} Sep 30 20:31:58 crc kubenswrapper[4919]: I0930 20:31:58.848451 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-jphnh" Sep 30 20:31:58 crc kubenswrapper[4919]: I0930 20:31:58.954600 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-combined-ca-bundle\") pod \"39cdbc20-9bb1-4527-8195-f2b885c676a4\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " Sep 30 20:31:58 crc kubenswrapper[4919]: I0930 20:31:58.954702 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdn58\" (UniqueName: \"kubernetes.io/projected/39cdbc20-9bb1-4527-8195-f2b885c676a4-kube-api-access-tdn58\") pod \"39cdbc20-9bb1-4527-8195-f2b885c676a4\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " Sep 30 20:31:58 crc kubenswrapper[4919]: I0930 20:31:58.954750 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-db-sync-config-data\") pod \"39cdbc20-9bb1-4527-8195-f2b885c676a4\" (UID: \"39cdbc20-9bb1-4527-8195-f2b885c676a4\") " Sep 30 20:31:58 crc kubenswrapper[4919]: I0930 20:31:58.961784 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "39cdbc20-9bb1-4527-8195-f2b885c676a4" (UID: "39cdbc20-9bb1-4527-8195-f2b885c676a4"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:58 crc kubenswrapper[4919]: I0930 20:31:58.982046 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39cdbc20-9bb1-4527-8195-f2b885c676a4-kube-api-access-tdn58" (OuterVolumeSpecName: "kube-api-access-tdn58") pod "39cdbc20-9bb1-4527-8195-f2b885c676a4" (UID: "39cdbc20-9bb1-4527-8195-f2b885c676a4"). InnerVolumeSpecName "kube-api-access-tdn58". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:31:58 crc kubenswrapper[4919]: I0930 20:31:58.982742 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39cdbc20-9bb1-4527-8195-f2b885c676a4" (UID: "39cdbc20-9bb1-4527-8195-f2b885c676a4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:31:59 crc kubenswrapper[4919]: I0930 20:31:59.057005 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:59 crc kubenswrapper[4919]: I0930 20:31:59.057042 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdn58\" (UniqueName: \"kubernetes.io/projected/39cdbc20-9bb1-4527-8195-f2b885c676a4-kube-api-access-tdn58\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:59 crc kubenswrapper[4919]: I0930 20:31:59.057054 4919 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/39cdbc20-9bb1-4527-8195-f2b885c676a4-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:31:59 crc kubenswrapper[4919]: I0930 20:31:59.488232 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 20:31:59 crc kubenswrapper[4919]: I0930 20:31:59.680545 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-jphnh" event={"ID":"39cdbc20-9bb1-4527-8195-f2b885c676a4","Type":"ContainerDied","Data":"2a65801e9136c8819e83cca31871ddebcb21a7d823cd5dece9998704514d4bf7"} Sep 30 20:31:59 crc kubenswrapper[4919]: I0930 20:31:59.680589 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a65801e9136c8819e83cca31871ddebcb21a7d823cd5dece9998704514d4bf7" Sep 30 20:31:59 crc kubenswrapper[4919]: I0930 20:31:59.680653 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-jphnh" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.139294 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7f8946bc95-rbsd8"] Sep 30 20:32:00 crc kubenswrapper[4919]: E0930 20:32:00.139787 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39cdbc20-9bb1-4527-8195-f2b885c676a4" containerName="barbican-db-sync" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.139805 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="39cdbc20-9bb1-4527-8195-f2b885c676a4" containerName="barbican-db-sync" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.140039 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="39cdbc20-9bb1-4527-8195-f2b885c676a4" containerName="barbican-db-sync" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.141240 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.147006 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.147270 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-95shp" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.147484 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.150398 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"] Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.152505 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.156562 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.171430 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f8946bc95-rbsd8"] Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.187072 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbn6g\" (UniqueName: \"kubernetes.io/projected/32220ef4-7a02-469d-8d56-fd48736838e0-kube-api-access-vbn6g\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.188140 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ksnd\" (UniqueName: \"kubernetes.io/projected/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-kube-api-access-8ksnd\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.188333 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-combined-ca-bundle\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.188453 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-config-data-custom\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.188550 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-logs\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.188708 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-config-data\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.188818 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-config-data\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.188943 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-config-data-custom\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.189153 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32220ef4-7a02-469d-8d56-fd48736838e0-logs\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.189505 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-combined-ca-bundle\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.212154 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"] Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.253090 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-vmx8b"] Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.254572 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.273512 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-vmx8b"] Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290734 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ksnd\" (UniqueName: \"kubernetes.io/projected/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-kube-api-access-8ksnd\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290793 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-combined-ca-bundle\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290820 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-config-data-custom\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290836 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-logs\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290877 
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290903 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-config-data\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290925 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-config-data\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290955 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290970 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-config-data-custom\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.290997 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32220ef4-7a02-469d-8d56-fd48736838e0-logs\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.291020 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-combined-ca-bundle\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.291039 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-config\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.291093 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.291111 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbn6g\" (UniqueName: \"kubernetes.io/projected/32220ef4-7a02-469d-8d56-fd48736838e0-kube-api-access-vbn6g\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.291155 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.291173 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc2vh\" (UniqueName: \"kubernetes.io/projected/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-kube-api-access-qc2vh\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.295010 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-config-data-custom\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.295302 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32220ef4-7a02-469d-8d56-fd48736838e0-logs\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.297320 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-logs\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.298201 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-config-data\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.299250 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-combined-ca-bundle\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"
Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.299439 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-combined-ca-bundle\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8"
\"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.300719 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-config-data\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.300826 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32220ef4-7a02-469d-8d56-fd48736838e0-config-data-custom\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.310038 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbn6g\" (UniqueName: \"kubernetes.io/projected/32220ef4-7a02-469d-8d56-fd48736838e0-kube-api-access-vbn6g\") pod \"barbican-keystone-listener-6dbbb7bcf8-57zt6\" (UID: \"32220ef4-7a02-469d-8d56-fd48736838e0\") " pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.316453 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ksnd\" (UniqueName: \"kubernetes.io/projected/0ffb951e-fda6-4079-ba13-02ddbd2ab58f-kube-api-access-8ksnd\") pod \"barbican-worker-7f8946bc95-rbsd8\" (UID: \"0ffb951e-fda6-4079-ba13-02ddbd2ab58f\") " pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.389192 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-755c6889fd-n5x5p"] Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.394253 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.395985 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.396045 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-config\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.396085 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.396145 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.396169 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc2vh\" (UniqueName: \"kubernetes.io/projected/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-kube-api-access-qc2vh\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.396280 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.397275 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-sb\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.397983 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-nb\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.398450 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.399561 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-swift-storage-0\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.399753 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-config\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.400036 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-svc\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.422723 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc2vh\" (UniqueName: \"kubernetes.io/projected/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-kube-api-access-qc2vh\") pod \"dnsmasq-dns-59d5ff467f-vmx8b\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.423473 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-755c6889fd-n5x5p"] Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.481950 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7f8946bc95-rbsd8" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.497680 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-combined-ca-bundle\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.497761 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.497789 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swkxm\" (UniqueName: \"kubernetes.io/projected/5d59e33b-2daa-46c5-9022-2b3509e817e8-kube-api-access-swkxm\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.497806 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data-custom\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.498167 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/5d59e33b-2daa-46c5-9022-2b3509e817e8-logs\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.499689 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.574506 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.604604 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d59e33b-2daa-46c5-9022-2b3509e817e8-logs\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.604913 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-combined-ca-bundle\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.606136 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.606206 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swkxm\" (UniqueName: \"kubernetes.io/projected/5d59e33b-2daa-46c5-9022-2b3509e817e8-kube-api-access-swkxm\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.606265 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data-custom\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.607990 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d59e33b-2daa-46c5-9022-2b3509e817e8-logs\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.610329 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data-custom\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.619859 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-combined-ca-bundle\") pod 
\"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.622632 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.644553 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swkxm\" (UniqueName: \"kubernetes.io/projected/5d59e33b-2daa-46c5-9022-2b3509e817e8-kube-api-access-swkxm\") pod \"barbican-api-755c6889fd-n5x5p\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:00 crc kubenswrapper[4919]: I0930 20:32:00.772856 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.781887 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-ff644977d-6mdtd"] Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.792662 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.797557 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.797941 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.860364 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-ff644977d-6mdtd"] Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.864727 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-config-data\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.864771 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/685bc25b-e005-4a7c-933b-87fd1b925709-logs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.864807 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-public-tls-certs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.864858 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-config-data-custom\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.864897 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkprk\" (UniqueName: \"kubernetes.io/projected/685bc25b-e005-4a7c-933b-87fd1b925709-kube-api-access-jkprk\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.865054 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-combined-ca-bundle\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.865148 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-internal-tls-certs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.966308 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkprk\" (UniqueName: \"kubernetes.io/projected/685bc25b-e005-4a7c-933b-87fd1b925709-kube-api-access-jkprk\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.966386 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-combined-ca-bundle\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.966444 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-internal-tls-certs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.966482 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-config-data\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.966498 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/685bc25b-e005-4a7c-933b-87fd1b925709-logs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.966521 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-public-tls-certs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd"
pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.966551 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-config-data-custom\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.967330 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/685bc25b-e005-4a7c-933b-87fd1b925709-logs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.971699 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-config-data-custom\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.971975 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-public-tls-certs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.973196 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-config-data\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.985726 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-combined-ca-bundle\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.993835 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkprk\" (UniqueName: \"kubernetes.io/projected/685bc25b-e005-4a7c-933b-87fd1b925709-kube-api-access-jkprk\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:02 crc kubenswrapper[4919]: I0930 20:32:02.998368 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/685bc25b-e005-4a7c-933b-87fd1b925709-internal-tls-certs\") pod \"barbican-api-ff644977d-6mdtd\" (UID: \"685bc25b-e005-4a7c-933b-87fd1b925709\") " pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:03 crc kubenswrapper[4919]: I0930 20:32:03.125905 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:07 crc kubenswrapper[4919]: E0930 20:32:07.762957 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Sep 30 20:32:07 crc kubenswrapper[4919]: E0930 20:32:07.764081 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kjxc6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 30 20:32:07 crc kubenswrapper[4919]: E0930 20:32:07.765552 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"proxy-httpd\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" 
pod="openstack/ceilometer-0" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" Sep 30 20:32:07 crc kubenswrapper[4919]: I0930 20:32:07.783775 4919 generic.go:334] "Generic (PLEG): container finished" podID="737cb8aa-63c3-4a59-893c-3d5075795304" containerID="f2e6cd8e280ead561c63fbecc47f9cf11fdc13114c08ceefc02b347301650935" exitCode=0 Sep 30 20:32:07 crc kubenswrapper[4919]: I0930 20:32:07.783882 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pl4gj" event={"ID":"737cb8aa-63c3-4a59-893c-3d5075795304","Type":"ContainerDied","Data":"f2e6cd8e280ead561c63fbecc47f9cf11fdc13114c08ceefc02b347301650935"} Sep 30 20:32:07 crc kubenswrapper[4919]: I0930 20:32:07.784030 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="ceilometer-central-agent" containerID="cri-o://96cfa8db319d6c6d8ddfed9bf197f2b68ab300628e5f01c787b4e81b2d06b5cb" gracePeriod=30 Sep 30 20:32:07 crc kubenswrapper[4919]: I0930 20:32:07.784091 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="sg-core" containerID="cri-o://d4b547265ce318392550a8af579d3b8716723076d7ad569522200356991a0dce" gracePeriod=30 Sep 30 20:32:07 crc kubenswrapper[4919]: I0930 20:32:07.784081 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="ceilometer-notification-agent" containerID="cri-o://f3c7d5ce118170fb3ca31dfd81f71a4772449a7523e4a659c40302d816a03232" gracePeriod=30 Sep 30 20:32:08 crc kubenswrapper[4919]: I0930 20:32:08.796767 4919 generic.go:334] "Generic (PLEG): container finished" podID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerID="d4b547265ce318392550a8af579d3b8716723076d7ad569522200356991a0dce" exitCode=2 Sep 30 20:32:08 crc kubenswrapper[4919]: I0930 20:32:08.797082 4919 generic.go:334] "Generic (PLEG): container finished" podID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerID="96cfa8db319d6c6d8ddfed9bf197f2b68ab300628e5f01c787b4e81b2d06b5cb" exitCode=0 Sep 30 20:32:08 crc kubenswrapper[4919]: I0930 20:32:08.796846 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2","Type":"ContainerDied","Data":"d4b547265ce318392550a8af579d3b8716723076d7ad569522200356991a0dce"} Sep 30 20:32:08 crc kubenswrapper[4919]: I0930 20:32:08.797185 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2","Type":"ContainerDied","Data":"96cfa8db319d6c6d8ddfed9bf197f2b68ab300628e5f01c787b4e81b2d06b5cb"} Sep 30 20:32:08 crc kubenswrapper[4919]: E0930 20:32:08.842827 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 30 20:32:08 crc kubenswrapper[4919]: E0930 20:32:08.843039 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
Sep 30 20:32:08 crc kubenswrapper[4919]: E0930 20:32:08.844293 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-sp45k" podUID="8d660eb4-7718-4b2b-a834-9b7d0d2b64a4"
Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.175324 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-pl4gj"
Need to start a new one" pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.293138 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-combined-ca-bundle\") pod \"737cb8aa-63c3-4a59-893c-3d5075795304\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.293267 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-config\") pod \"737cb8aa-63c3-4a59-893c-3d5075795304\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.293455 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvs2v\" (UniqueName: \"kubernetes.io/projected/737cb8aa-63c3-4a59-893c-3d5075795304-kube-api-access-fvs2v\") pod \"737cb8aa-63c3-4a59-893c-3d5075795304\" (UID: \"737cb8aa-63c3-4a59-893c-3d5075795304\") " Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.300285 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/737cb8aa-63c3-4a59-893c-3d5075795304-kube-api-access-fvs2v" (OuterVolumeSpecName: "kube-api-access-fvs2v") pod "737cb8aa-63c3-4a59-893c-3d5075795304" (UID: "737cb8aa-63c3-4a59-893c-3d5075795304"). InnerVolumeSpecName "kube-api-access-fvs2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.321595 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-config" (OuterVolumeSpecName: "config") pod "737cb8aa-63c3-4a59-893c-3d5075795304" (UID: "737cb8aa-63c3-4a59-893c-3d5075795304"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.321675 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "737cb8aa-63c3-4a59-893c-3d5075795304" (UID: "737cb8aa-63c3-4a59-893c-3d5075795304"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:09 crc kubenswrapper[4919]: W0930 20:32:09.367852 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32220ef4_7a02_469d_8d56_fd48736838e0.slice/crio-fa5cde9c1064d4175395f06e1cd8341bc395b085edab206f357b1e37d0adcd7a WatchSource:0}: Error finding container fa5cde9c1064d4175395f06e1cd8341bc395b085edab206f357b1e37d0adcd7a: Status 404 returned error can't find the container with id fa5cde9c1064d4175395f06e1cd8341bc395b085edab206f357b1e37d0adcd7a Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.368127 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6"] Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.396188 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvs2v\" (UniqueName: \"kubernetes.io/projected/737cb8aa-63c3-4a59-893c-3d5075795304-kube-api-access-fvs2v\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.396241 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.396254 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/737cb8aa-63c3-4a59-893c-3d5075795304-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.429473 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f8946bc95-rbsd8"] Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.438661 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-ff644977d-6mdtd"] Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.447532 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-755c6889fd-n5x5p"] Sep 30 20:32:09 crc kubenswrapper[4919]: W0930 20:32:09.454830 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d59e33b_2daa_46c5_9022_2b3509e817e8.slice/crio-0b85a10b57e7409e84168a6392c7161e860a11e9b7511979cd2d15bec95622fe WatchSource:0}: Error finding container 0b85a10b57e7409e84168a6392c7161e860a11e9b7511979cd2d15bec95622fe: Status 404 returned error can't find the container with id 0b85a10b57e7409e84168a6392c7161e860a11e9b7511979cd2d15bec95622fe Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.454935 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-vmx8b"] Sep 30 20:32:09 crc kubenswrapper[4919]: W0930 20:32:09.460803 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcd42878_f86e_4a13_9e9d_d0ad211a0c02.slice/crio-3679d0848f367af3f6ffcdbc846bb73385326e70acd2fe63d5ac490c365055de WatchSource:0}: Error finding container 3679d0848f367af3f6ffcdbc846bb73385326e70acd2fe63d5ac490c365055de: Status 404 returned error can't find the container with id 3679d0848f367af3f6ffcdbc846bb73385326e70acd2fe63d5ac490c365055de Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.809934 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-ff644977d-6mdtd" 
event={"ID":"685bc25b-e005-4a7c-933b-87fd1b925709","Type":"ContainerStarted","Data":"d0f76b731a8aea7d255c7c7c9c723214a0d2d9fb8d1276abc4a888ceb0567e16"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.810386 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-ff644977d-6mdtd" event={"ID":"685bc25b-e005-4a7c-933b-87fd1b925709","Type":"ContainerStarted","Data":"a2612a283c0fc706b6d69a0c3458749453894e84cb03feba699fbab659929d01"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.811166 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f8946bc95-rbsd8" event={"ID":"0ffb951e-fda6-4079-ba13-02ddbd2ab58f","Type":"ContainerStarted","Data":"40cc90aba8bbfd91896962b648f1c82e5984a8acb9d69b2091cc68fe07c9b12e"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.812632 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-755c6889fd-n5x5p" event={"ID":"5d59e33b-2daa-46c5-9022-2b3509e817e8","Type":"ContainerStarted","Data":"f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.812657 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-755c6889fd-n5x5p" event={"ID":"5d59e33b-2daa-46c5-9022-2b3509e817e8","Type":"ContainerStarted","Data":"0b85a10b57e7409e84168a6392c7161e860a11e9b7511979cd2d15bec95622fe"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.813985 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" event={"ID":"32220ef4-7a02-469d-8d56-fd48736838e0","Type":"ContainerStarted","Data":"fa5cde9c1064d4175395f06e1cd8341bc395b085edab206f357b1e37d0adcd7a"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.815055 4919 generic.go:334] "Generic (PLEG): container finished" podID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" containerID="d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e" exitCode=0 Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.815101 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" event={"ID":"bcd42878-f86e-4a13-9e9d-d0ad211a0c02","Type":"ContainerDied","Data":"d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.815117 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" event={"ID":"bcd42878-f86e-4a13-9e9d-d0ad211a0c02","Type":"ContainerStarted","Data":"3679d0848f367af3f6ffcdbc846bb73385326e70acd2fe63d5ac490c365055de"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.818903 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-pl4gj" event={"ID":"737cb8aa-63c3-4a59-893c-3d5075795304","Type":"ContainerDied","Data":"3cc34d7cce8732d3c5d9d1823b06f85cfa23b88dd2fabcc6f04a63496e72e400"} Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.818941 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-pl4gj" Sep 30 20:32:09 crc kubenswrapper[4919]: I0930 20:32:09.818967 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cc34d7cce8732d3c5d9d1823b06f85cfa23b88dd2fabcc6f04a63496e72e400" Sep 30 20:32:09 crc kubenswrapper[4919]: E0930 20:32:09.820412 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-sp45k" podUID="8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.011791 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-vmx8b"] Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.053400 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-22zsz"] Sep 30 20:32:10 crc kubenswrapper[4919]: E0930 20:32:10.053727 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="737cb8aa-63c3-4a59-893c-3d5075795304" containerName="neutron-db-sync" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.053738 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="737cb8aa-63c3-4a59-893c-3d5075795304" containerName="neutron-db-sync" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.053914 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="737cb8aa-63c3-4a59-893c-3d5075795304" containerName="neutron-db-sync" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.054721 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.081184 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7975dfb48-wvh2l"] Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.088264 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.096208 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.096423 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-qjgb7" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.097167 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.097207 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.114873 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-combined-ca-bundle\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.114932 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.114963 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.115008 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s98rj\" (UniqueName: \"kubernetes.io/projected/32fe776c-73a1-43fc-90c4-75c1f56c9966-kube-api-access-s98rj\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.115036 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9kn9\" (UniqueName: \"kubernetes.io/projected/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-kube-api-access-k9kn9\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.115068 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.115157 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-config\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " 
pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.115500 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-config\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.115554 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.115601 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-httpd-config\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.117227 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-ovndb-tls-certs\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.127115 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7975dfb48-wvh2l"] Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.171774 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-22zsz"] Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219484 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-config\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219536 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219565 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-httpd-config\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219614 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-ovndb-tls-certs\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219635 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-combined-ca-bundle\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219668 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219696 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219741 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s98rj\" (UniqueName: \"kubernetes.io/projected/32fe776c-73a1-43fc-90c4-75c1f56c9966-kube-api-access-s98rj\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219773 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9kn9\" (UniqueName: \"kubernetes.io/projected/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-kube-api-access-k9kn9\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219807 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.219830 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-config\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.220638 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-config\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.221173 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-nb\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.222425 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-svc\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.223024 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-swift-storage-0\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.224032 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-sb\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.231143 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-ovndb-tls-certs\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.231227 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-combined-ca-bundle\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.237780 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-httpd-config\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.243112 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-config\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.246563 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9kn9\" (UniqueName: \"kubernetes.io/projected/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-kube-api-access-k9kn9\") pod \"neutron-7975dfb48-wvh2l\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.249802 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s98rj\" (UniqueName: \"kubernetes.io/projected/32fe776c-73a1-43fc-90c4-75c1f56c9966-kube-api-access-s98rj\") pod \"dnsmasq-dns-75c8ddd69c-22zsz\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.455075 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.455586 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.836357 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-ff644977d-6mdtd" event={"ID":"685bc25b-e005-4a7c-933b-87fd1b925709","Type":"ContainerStarted","Data":"6a8144a5d15312456f258e87128c0a708b1aee8c22288f0a9d38d7a44342548a"} Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.838138 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.838184 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.839908 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-755c6889fd-n5x5p" event={"ID":"5d59e33b-2daa-46c5-9022-2b3509e817e8","Type":"ContainerStarted","Data":"f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298"} Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.839981 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.840008 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.845540 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" event={"ID":"bcd42878-f86e-4a13-9e9d-d0ad211a0c02","Type":"ContainerStarted","Data":"8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077"} Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.845758 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" podUID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" containerName="dnsmasq-dns" containerID="cri-o://8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077" gracePeriod=10 Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.845795 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.850317 4919 generic.go:334] "Generic (PLEG): container finished" podID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerID="f3c7d5ce118170fb3ca31dfd81f71a4772449a7523e4a659c40302d816a03232" exitCode=0 Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.850357 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2","Type":"ContainerDied","Data":"f3c7d5ce118170fb3ca31dfd81f71a4772449a7523e4a659c40302d816a03232"} Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.867086 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-ff644977d-6mdtd" podStartSLOduration=8.867066599 podStartE2EDuration="8.867066599s" podCreationTimestamp="2025-09-30 20:32:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:10.855753015 +0000 UTC m=+1115.971786152" watchObservedRunningTime="2025-09-30 20:32:10.867066599 +0000 UTC m=+1115.983099726" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.886016 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/barbican-api-755c6889fd-n5x5p" podStartSLOduration=10.885996552 podStartE2EDuration="10.885996552s" podCreationTimestamp="2025-09-30 20:32:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:10.883602493 +0000 UTC m=+1115.999635620" watchObservedRunningTime="2025-09-30 20:32:10.885996552 +0000 UTC m=+1116.002029679" Sep 30 20:32:10 crc kubenswrapper[4919]: I0930 20:32:10.912661 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" podStartSLOduration=10.912638236 podStartE2EDuration="10.912638236s" podCreationTimestamp="2025-09-30 20:32:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:10.904645647 +0000 UTC m=+1116.020678774" watchObservedRunningTime="2025-09-30 20:32:10.912638236 +0000 UTC m=+1116.028671363" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.022909 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.045041 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-config-data\") pod \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.045125 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjxc6\" (UniqueName: \"kubernetes.io/projected/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-kube-api-access-kjxc6\") pod \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.045143 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-scripts\") pod \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.045174 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-combined-ca-bundle\") pod \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.045349 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-sg-core-conf-yaml\") pod \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.045391 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-log-httpd\") pod \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.045451 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-run-httpd\") pod 
\"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\" (UID: \"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.046107 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" (UID: "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.046320 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" (UID: "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.051311 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-scripts" (OuterVolumeSpecName: "scripts") pod "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" (UID: "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.055661 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-kube-api-access-kjxc6" (OuterVolumeSpecName: "kube-api-access-kjxc6") pod "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" (UID: "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2"). InnerVolumeSpecName "kube-api-access-kjxc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.073537 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" (UID: "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.129682 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-22zsz"] Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.130984 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-config-data" (OuterVolumeSpecName: "config-data") pod "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" (UID: "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.132810 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" (UID: "29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.149151 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.149196 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjxc6\" (UniqueName: \"kubernetes.io/projected/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-kube-api-access-kjxc6\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.149226 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.149242 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.149254 4919 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.149264 4919 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.149274 4919 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.179173 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7975dfb48-wvh2l"] Sep 30 20:32:11 crc kubenswrapper[4919]: W0930 20:32:11.560812 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb97d953a_480a_41b8_bbc5_b9a87b3a20cb.slice/crio-04758ee6b14c13dc53a38e633efb79bbc288b37d74bd9d7e88af24abb7f9a288 WatchSource:0}: Error finding container 04758ee6b14c13dc53a38e633efb79bbc288b37d74bd9d7e88af24abb7f9a288: Status 404 returned error can't find the container with id 04758ee6b14c13dc53a38e633efb79bbc288b37d74bd9d7e88af24abb7f9a288 Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.570406 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.656454 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-svc\") pod \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.656510 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-config\") pod \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.656618 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qc2vh\" (UniqueName: \"kubernetes.io/projected/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-kube-api-access-qc2vh\") pod \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.656694 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-swift-storage-0\") pod \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.656723 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-nb\") pod \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.656771 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-sb\") pod \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\" (UID: \"bcd42878-f86e-4a13-9e9d-d0ad211a0c02\") " Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.661274 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-kube-api-access-qc2vh" (OuterVolumeSpecName: "kube-api-access-qc2vh") pod "bcd42878-f86e-4a13-9e9d-d0ad211a0c02" (UID: "bcd42878-f86e-4a13-9e9d-d0ad211a0c02"). InnerVolumeSpecName "kube-api-access-qc2vh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.715393 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bcd42878-f86e-4a13-9e9d-d0ad211a0c02" (UID: "bcd42878-f86e-4a13-9e9d-d0ad211a0c02"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.720475 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bcd42878-f86e-4a13-9e9d-d0ad211a0c02" (UID: "bcd42878-f86e-4a13-9e9d-d0ad211a0c02"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.724895 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bcd42878-f86e-4a13-9e9d-d0ad211a0c02" (UID: "bcd42878-f86e-4a13-9e9d-d0ad211a0c02"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.735989 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bcd42878-f86e-4a13-9e9d-d0ad211a0c02" (UID: "bcd42878-f86e-4a13-9e9d-d0ad211a0c02"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.751312 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-config" (OuterVolumeSpecName: "config") pod "bcd42878-f86e-4a13-9e9d-d0ad211a0c02" (UID: "bcd42878-f86e-4a13-9e9d-d0ad211a0c02"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.759565 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.759603 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.759614 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.759623 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qc2vh\" (UniqueName: \"kubernetes.io/projected/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-kube-api-access-qc2vh\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.759634 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.759642 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bcd42878-f86e-4a13-9e9d-d0ad211a0c02-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.861668 4919 generic.go:334] "Generic (PLEG): container finished" podID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" containerID="8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077" exitCode=0 Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.861721 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" event={"ID":"bcd42878-f86e-4a13-9e9d-d0ad211a0c02","Type":"ContainerDied","Data":"8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077"} Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 
20:32:11.861747 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" event={"ID":"bcd42878-f86e-4a13-9e9d-d0ad211a0c02","Type":"ContainerDied","Data":"3679d0848f367af3f6ffcdbc846bb73385326e70acd2fe63d5ac490c365055de"} Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.861761 4919 scope.go:117] "RemoveContainer" containerID="8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.861878 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59d5ff467f-vmx8b" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.874262 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2","Type":"ContainerDied","Data":"128d1d24b1ae14e5677d960a6e1162162b6665780cf29525d25492b895d0db5c"} Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.874386 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.876746 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7975dfb48-wvh2l" event={"ID":"b97d953a-480a-41b8-bbc5-b9a87b3a20cb","Type":"ContainerStarted","Data":"04758ee6b14c13dc53a38e633efb79bbc288b37d74bd9d7e88af24abb7f9a288"} Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.928127 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.946882 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.956814 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-vmx8b"] Sep 30 20:32:11 crc kubenswrapper[4919]: I0930 20:32:11.980865 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59d5ff467f-vmx8b"] Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.997341 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:12 crc kubenswrapper[4919]: E0930 20:32:11.998108 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="sg-core" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998127 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="sg-core" Sep 30 20:32:12 crc kubenswrapper[4919]: E0930 20:32:11.998143 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" containerName="init" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998149 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" containerName="init" Sep 30 20:32:12 crc kubenswrapper[4919]: E0930 20:32:11.998168 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" containerName="dnsmasq-dns" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998174 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" containerName="dnsmasq-dns" Sep 30 20:32:12 crc kubenswrapper[4919]: E0930 20:32:11.998189 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="ceilometer-central-agent" Sep 30 
20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998196 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="ceilometer-central-agent" Sep 30 20:32:12 crc kubenswrapper[4919]: E0930 20:32:11.998225 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="ceilometer-notification-agent" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998232 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="ceilometer-notification-agent" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998394 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" containerName="dnsmasq-dns" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998408 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="sg-core" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998422 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="ceilometer-notification-agent" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:11.998434 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" containerName="ceilometer-central-agent" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.000363 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.003233 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.004917 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.005165 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.067539 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-log-httpd\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.067577 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-run-httpd\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.067599 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.067756 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkggz\" (UniqueName: \"kubernetes.io/projected/9d317adc-ca3d-45f8-b364-680ae2c9fe18-kube-api-access-wkggz\") pod \"ceilometer-0\" (UID: 
\"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.067832 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-scripts\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.067929 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-config-data\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.068001 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: W0930 20:32:12.073667 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32fe776c_73a1_43fc_90c4_75c1f56c9966.slice/crio-def45f6efd4ec7f67b5f72272d2049e2329d3698d61aba4b2df346b7565ea135 WatchSource:0}: Error finding container def45f6efd4ec7f67b5f72272d2049e2329d3698d61aba4b2df346b7565ea135: Status 404 returned error can't find the container with id def45f6efd4ec7f67b5f72272d2049e2329d3698d61aba4b2df346b7565ea135 Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.164933 4919 scope.go:117] "RemoveContainer" containerID="d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.169539 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-log-httpd\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.169594 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-run-httpd\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.169612 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.169634 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkggz\" (UniqueName: \"kubernetes.io/projected/9d317adc-ca3d-45f8-b364-680ae2c9fe18-kube-api-access-wkggz\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.169660 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-scripts\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.169699 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-config-data\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.169732 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.169938 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-run-httpd\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.170283 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-log-httpd\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.173807 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.174134 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-scripts\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.175894 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.176000 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-config-data\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.193982 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkggz\" (UniqueName: \"kubernetes.io/projected/9d317adc-ca3d-45f8-b364-680ae2c9fe18-kube-api-access-wkggz\") pod \"ceilometer-0\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.246265 4919 scope.go:117] "RemoveContainer" containerID="8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077" Sep 30 20:32:12 crc kubenswrapper[4919]: E0930 20:32:12.246588 4919 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077\": container with ID starting with 8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077 not found: ID does not exist" containerID="8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.246626 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077"} err="failed to get container status \"8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077\": rpc error: code = NotFound desc = could not find container \"8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077\": container with ID starting with 8d7c8e897d7df69d794307273ea08f5399fa26034340e11560b1647887c59077 not found: ID does not exist" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.246646 4919 scope.go:117] "RemoveContainer" containerID="d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e" Sep 30 20:32:12 crc kubenswrapper[4919]: E0930 20:32:12.246999 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e\": container with ID starting with d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e not found: ID does not exist" containerID="d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.247055 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e"} err="failed to get container status \"d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e\": rpc error: code = NotFound desc = could not find container \"d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e\": container with ID starting with d38c27ce991698ebf30e8764fe2011b58268fe8d80a4d49facc02cea268a904e not found: ID does not exist" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.247093 4919 scope.go:117] "RemoveContainer" containerID="d4b547265ce318392550a8af579d3b8716723076d7ad569522200356991a0dce" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.275562 4919 scope.go:117] "RemoveContainer" containerID="f3c7d5ce118170fb3ca31dfd81f71a4772449a7523e4a659c40302d816a03232" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.320269 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.321738 4919 scope.go:117] "RemoveContainer" containerID="96cfa8db319d6c6d8ddfed9bf197f2b68ab300628e5f01c787b4e81b2d06b5cb" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.804502 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db95ddc59-4ffw5"] Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.806255 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.810338 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.810512 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.849229 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db95ddc59-4ffw5"] Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.897547 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-config\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.897648 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-public-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.897672 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-internal-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.897740 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-httpd-config\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.897857 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnvbt\" (UniqueName: \"kubernetes.io/projected/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-kube-api-access-cnvbt\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.897942 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-ovndb-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.898070 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-combined-ca-bundle\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.916493 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7975dfb48-wvh2l" 
event={"ID":"b97d953a-480a-41b8-bbc5-b9a87b3a20cb","Type":"ContainerStarted","Data":"f5d1214c9514f8da649a24e16ddb47e8ea8c9a384bdc26619aa32cd2c5a47859"} Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.919708 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f8946bc95-rbsd8" event={"ID":"0ffb951e-fda6-4079-ba13-02ddbd2ab58f","Type":"ContainerStarted","Data":"e9cb89ae2bcfebb5ff6fff38318ca7aaa38c57494aac2a75471f9e7a08fc17bd"} Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.919750 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f8946bc95-rbsd8" event={"ID":"0ffb951e-fda6-4079-ba13-02ddbd2ab58f","Type":"ContainerStarted","Data":"b70c9c1ac41c07660f2fe42fe854450cbc0d2144569671ef4dab42961d03dca6"} Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.922981 4919 generic.go:334] "Generic (PLEG): container finished" podID="32fe776c-73a1-43fc-90c4-75c1f56c9966" containerID="d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1" exitCode=0 Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.923038 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" event={"ID":"32fe776c-73a1-43fc-90c4-75c1f56c9966","Type":"ContainerDied","Data":"d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1"} Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.923065 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" event={"ID":"32fe776c-73a1-43fc-90c4-75c1f56c9966","Type":"ContainerStarted","Data":"def45f6efd4ec7f67b5f72272d2049e2329d3698d61aba4b2df346b7565ea135"} Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.927161 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" event={"ID":"32220ef4-7a02-469d-8d56-fd48736838e0","Type":"ContainerStarted","Data":"960291bb72279f13a6b0a6350ce544f6d663ad7e525a9b92c190938f2ed80662"} Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.956341 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7f8946bc95-rbsd8" podStartSLOduration=10.222378633 podStartE2EDuration="12.956318681s" podCreationTimestamp="2025-09-30 20:32:00 +0000 UTC" firstStartedPulling="2025-09-30 20:32:09.434598102 +0000 UTC m=+1114.550631239" lastFinishedPulling="2025-09-30 20:32:12.16853816 +0000 UTC m=+1117.284571287" observedRunningTime="2025-09-30 20:32:12.944471741 +0000 UTC m=+1118.060504878" watchObservedRunningTime="2025-09-30 20:32:12.956318681 +0000 UTC m=+1118.072351808" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.998919 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-httpd-config\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.998952 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnvbt\" (UniqueName: \"kubernetes.io/projected/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-kube-api-access-cnvbt\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.998981 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-ovndb-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.999038 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-combined-ca-bundle\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.999126 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-config\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.999246 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-public-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:12 crc kubenswrapper[4919]: I0930 20:32:12.999279 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-internal-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.004362 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.007605 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-internal-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.008822 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-combined-ca-bundle\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.009628 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-ovndb-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.009898 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-httpd-config\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.010790 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-public-tls-certs\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.011744 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-config\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.016841 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnvbt\" (UniqueName: \"kubernetes.io/projected/3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0-kube-api-access-cnvbt\") pod \"neutron-db95ddc59-4ffw5\" (UID: \"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0\") " pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.200560 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.645011 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2" path="/var/lib/kubelet/pods/29fa27b6-5c86-43eb-abd6-c3eb68ea2fe2/volumes" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.646081 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcd42878-f86e-4a13-9e9d-d0ad211a0c02" path="/var/lib/kubelet/pods/bcd42878-f86e-4a13-9e9d-d0ad211a0c02/volumes" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.734348 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db95ddc59-4ffw5"] Sep 30 20:32:13 crc kubenswrapper[4919]: W0930 20:32:13.736503 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ae684b0_0fe1_48e2_bcf3_1de5b70a1cc0.slice/crio-c0a4b45f802f6e9090455eba068a3e3e18c0db97ce05ce4b42ff36fde686a40e WatchSource:0}: Error finding container c0a4b45f802f6e9090455eba068a3e3e18c0db97ce05ce4b42ff36fde686a40e: Status 404 returned error can't find the container with id c0a4b45f802f6e9090455eba068a3e3e18c0db97ce05ce4b42ff36fde686a40e Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.942317 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerStarted","Data":"f9991d6556f27a212ed09be36de35ca08657cbdb6c5561c77e5b46195010ca45"} Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.942649 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerStarted","Data":"a8803a8584f57b3a435c76f07555f8b3ea10162d268c52def2db4daa1673277a"} Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.944325 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" event={"ID":"32220ef4-7a02-469d-8d56-fd48736838e0","Type":"ContainerStarted","Data":"6771ba2c88c84c8eafcff6e0d43a5f24c10c4219c9ba6a7303804c612698495b"} Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.946675 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db95ddc59-4ffw5" 
event={"ID":"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0","Type":"ContainerStarted","Data":"c0a4b45f802f6e9090455eba068a3e3e18c0db97ce05ce4b42ff36fde686a40e"} Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.948480 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7975dfb48-wvh2l" event={"ID":"b97d953a-480a-41b8-bbc5-b9a87b3a20cb","Type":"ContainerStarted","Data":"8298497be040e62e3183ad776599c51924b3293af2ebff9fb704621cffdb427d"} Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.948624 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.952335 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" event={"ID":"32fe776c-73a1-43fc-90c4-75c1f56c9966","Type":"ContainerStarted","Data":"6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd"} Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.952383 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.978264 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6dbbb7bcf8-57zt6" podStartSLOduration=11.173881668 podStartE2EDuration="13.978247586s" podCreationTimestamp="2025-09-30 20:32:00 +0000 UTC" firstStartedPulling="2025-09-30 20:32:09.371082081 +0000 UTC m=+1114.487115218" lastFinishedPulling="2025-09-30 20:32:12.175448009 +0000 UTC m=+1117.291481136" observedRunningTime="2025-09-30 20:32:13.966038416 +0000 UTC m=+1119.082071543" watchObservedRunningTime="2025-09-30 20:32:13.978247586 +0000 UTC m=+1119.094280713" Sep 30 20:32:13 crc kubenswrapper[4919]: I0930 20:32:13.991352 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" podStartSLOduration=3.991333311 podStartE2EDuration="3.991333311s" podCreationTimestamp="2025-09-30 20:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:13.988692256 +0000 UTC m=+1119.104725393" watchObservedRunningTime="2025-09-30 20:32:13.991333311 +0000 UTC m=+1119.107366438" Sep 30 20:32:14 crc kubenswrapper[4919]: I0930 20:32:14.012481 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7975dfb48-wvh2l" podStartSLOduration=4.012464966 podStartE2EDuration="4.012464966s" podCreationTimestamp="2025-09-30 20:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:14.011869769 +0000 UTC m=+1119.127902896" watchObservedRunningTime="2025-09-30 20:32:14.012464966 +0000 UTC m=+1119.128498093" Sep 30 20:32:14 crc kubenswrapper[4919]: I0930 20:32:14.962244 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerStarted","Data":"1a02f2298e590e7a2260a1b9d9f7eb504a220d69bbdc92058fe567b704966d54"} Sep 30 20:32:14 crc kubenswrapper[4919]: I0930 20:32:14.964345 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db95ddc59-4ffw5" event={"ID":"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0","Type":"ContainerStarted","Data":"43e69d4833ce62bc4dda41b1a0d318e4e7aa0377c1897efdbf36919870071fcd"} Sep 30 20:32:14 crc kubenswrapper[4919]: 
I0930 20:32:14.964381 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db95ddc59-4ffw5" event={"ID":"3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0","Type":"ContainerStarted","Data":"20cf57d55213ff599ef8e98e80955864cedf80b24e11122e12a4793b5abfe7dd"} Sep 30 20:32:14 crc kubenswrapper[4919]: I0930 20:32:14.987242 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db95ddc59-4ffw5" podStartSLOduration=2.987203418 podStartE2EDuration="2.987203418s" podCreationTimestamp="2025-09-30 20:32:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:14.981281188 +0000 UTC m=+1120.097314315" watchObservedRunningTime="2025-09-30 20:32:14.987203418 +0000 UTC m=+1120.103236545" Sep 30 20:32:15 crc kubenswrapper[4919]: I0930 20:32:15.974453 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerStarted","Data":"37970fcfd99d592da7cfb1be66d2d1c2d2d38199212f2c2a827c3f1f0ef928ee"} Sep 30 20:32:15 crc kubenswrapper[4919]: I0930 20:32:15.974782 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:17 crc kubenswrapper[4919]: I0930 20:32:17.416694 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:32:17 crc kubenswrapper[4919]: I0930 20:32:17.417826 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6f9cd6fc64-z8qnp" Sep 30 20:32:17 crc kubenswrapper[4919]: I0930 20:32:17.644109 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:17 crc kubenswrapper[4919]: I0930 20:32:17.773743 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.011249 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerStarted","Data":"5ced391738ae37ac2cd1d5601fe2be75763a2c5a2e8f464816599363ecc3004c"} Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.011605 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.035578 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.17400468 podStartE2EDuration="8.035564109s" podCreationTimestamp="2025-09-30 20:32:11 +0000 UTC" firstStartedPulling="2025-09-30 20:32:13.0204361 +0000 UTC m=+1118.136469227" lastFinishedPulling="2025-09-30 20:32:17.881995529 +0000 UTC m=+1122.998028656" observedRunningTime="2025-09-30 20:32:19.029978439 +0000 UTC m=+1124.146011566" watchObservedRunningTime="2025-09-30 20:32:19.035564109 +0000 UTC m=+1124.151597226" Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.641415 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.769825 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-ff644977d-6mdtd" Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.818962 4919 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/barbican-api-755c6889fd-n5x5p"] Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.819373 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-755c6889fd-n5x5p" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api-log" containerID="cri-o://f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767" gracePeriod=30 Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.819697 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-755c6889fd-n5x5p" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api" containerID="cri-o://f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298" gracePeriod=30 Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.828838 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-755c6889fd-n5x5p" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": EOF" Sep 30 20:32:19 crc kubenswrapper[4919]: I0930 20:32:19.828914 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-755c6889fd-n5x5p" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": EOF" Sep 30 20:32:20 crc kubenswrapper[4919]: I0930 20:32:20.023334 4919 generic.go:334] "Generic (PLEG): container finished" podID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerID="f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767" exitCode=143 Sep 30 20:32:20 crc kubenswrapper[4919]: I0930 20:32:20.023407 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-755c6889fd-n5x5p" event={"ID":"5d59e33b-2daa-46c5-9022-2b3509e817e8","Type":"ContainerDied","Data":"f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767"} Sep 30 20:32:20 crc kubenswrapper[4919]: I0930 20:32:20.424418 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:20 crc kubenswrapper[4919]: I0930 20:32:20.495475 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-rflv5"] Sep 30 20:32:20 crc kubenswrapper[4919]: I0930 20:32:20.495695 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" podUID="40d00a75-5b72-4341-8618-1abb614b53cb" containerName="dnsmasq-dns" containerID="cri-o://3721b47eb484f189890c244c55ec3bf9ec1a039895753039a04e59791b73bf27" gracePeriod=10 Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.040424 4919 generic.go:334] "Generic (PLEG): container finished" podID="40d00a75-5b72-4341-8618-1abb614b53cb" containerID="3721b47eb484f189890c244c55ec3bf9ec1a039895753039a04e59791b73bf27" exitCode=0 Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.040687 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" event={"ID":"40d00a75-5b72-4341-8618-1abb614b53cb","Type":"ContainerDied","Data":"3721b47eb484f189890c244c55ec3bf9ec1a039895753039a04e59791b73bf27"} Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.040717 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" 
event={"ID":"40d00a75-5b72-4341-8618-1abb614b53cb","Type":"ContainerDied","Data":"2b7189c49995956dd90418447f2f91b37243df2a6e76823b64e901095bf5eec8"} Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.040727 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b7189c49995956dd90418447f2f91b37243df2a6e76823b64e901095bf5eec8" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.134163 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.241565 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-659c895849-vsrcz" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.292413 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-config\") pod \"40d00a75-5b72-4341-8618-1abb614b53cb\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.292504 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-swift-storage-0\") pod \"40d00a75-5b72-4341-8618-1abb614b53cb\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.292548 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-nb\") pod \"40d00a75-5b72-4341-8618-1abb614b53cb\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.292607 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbfdp\" (UniqueName: \"kubernetes.io/projected/40d00a75-5b72-4341-8618-1abb614b53cb-kube-api-access-dbfdp\") pod \"40d00a75-5b72-4341-8618-1abb614b53cb\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.292677 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-svc\") pod \"40d00a75-5b72-4341-8618-1abb614b53cb\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.292751 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-sb\") pod \"40d00a75-5b72-4341-8618-1abb614b53cb\" (UID: \"40d00a75-5b72-4341-8618-1abb614b53cb\") " Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.320061 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40d00a75-5b72-4341-8618-1abb614b53cb-kube-api-access-dbfdp" (OuterVolumeSpecName: "kube-api-access-dbfdp") pod "40d00a75-5b72-4341-8618-1abb614b53cb" (UID: "40d00a75-5b72-4341-8618-1abb614b53cb"). InnerVolumeSpecName "kube-api-access-dbfdp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.396912 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbfdp\" (UniqueName: \"kubernetes.io/projected/40d00a75-5b72-4341-8618-1abb614b53cb-kube-api-access-dbfdp\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.404515 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "40d00a75-5b72-4341-8618-1abb614b53cb" (UID: "40d00a75-5b72-4341-8618-1abb614b53cb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.420727 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-config" (OuterVolumeSpecName: "config") pod "40d00a75-5b72-4341-8618-1abb614b53cb" (UID: "40d00a75-5b72-4341-8618-1abb614b53cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.436321 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "40d00a75-5b72-4341-8618-1abb614b53cb" (UID: "40d00a75-5b72-4341-8618-1abb614b53cb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.445770 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "40d00a75-5b72-4341-8618-1abb614b53cb" (UID: "40d00a75-5b72-4341-8618-1abb614b53cb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.489705 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "40d00a75-5b72-4341-8618-1abb614b53cb" (UID: "40d00a75-5b72-4341-8618-1abb614b53cb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.498941 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.498973 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.498984 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.498993 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:21 crc kubenswrapper[4919]: I0930 20:32:21.499002 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/40d00a75-5b72-4341-8618-1abb614b53cb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.048569 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b5c85b87-rflv5" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.087099 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-rflv5"] Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.111063 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b5c85b87-rflv5"] Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.140296 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 30 20:32:22 crc kubenswrapper[4919]: E0930 20:32:22.140923 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d00a75-5b72-4341-8618-1abb614b53cb" containerName="dnsmasq-dns" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.140994 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d00a75-5b72-4341-8618-1abb614b53cb" containerName="dnsmasq-dns" Sep 30 20:32:22 crc kubenswrapper[4919]: E0930 20:32:22.141059 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d00a75-5b72-4341-8618-1abb614b53cb" containerName="init" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.141109 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d00a75-5b72-4341-8618-1abb614b53cb" containerName="init" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.141340 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d00a75-5b72-4341-8618-1abb614b53cb" containerName="dnsmasq-dns" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.142051 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.144040 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-zrl48" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.146179 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.147700 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.147835 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.315004 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.315112 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.315199 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.315263 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t982\" (UniqueName: \"kubernetes.io/projected/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-kube-api-access-7t982\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.416687 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.416971 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.417074 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.417152 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-7t982\" (UniqueName: \"kubernetes.io/projected/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-kube-api-access-7t982\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.418182 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.426113 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.426327 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config-secret\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.456004 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t982\" (UniqueName: \"kubernetes.io/projected/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-kube-api-access-7t982\") pod \"openstackclient\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") " pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.482690 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 30 20:32:22 crc kubenswrapper[4919]: I0930 20:32:22.994390 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 30 20:32:23 crc kubenswrapper[4919]: W0930 20:32:23.001891 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c681877_84e0_4fd4_ab4a_e13fe3d4da9e.slice/crio-6de66d4aed64bcb58433dfa468db969cf89870737c1a0dc740a3f962fbe0bb76 WatchSource:0}: Error finding container 6de66d4aed64bcb58433dfa468db969cf89870737c1a0dc740a3f962fbe0bb76: Status 404 returned error can't find the container with id 6de66d4aed64bcb58433dfa468db969cf89870737c1a0dc740a3f962fbe0bb76 Sep 30 20:32:23 crc kubenswrapper[4919]: I0930 20:32:23.059941 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e","Type":"ContainerStarted","Data":"6de66d4aed64bcb58433dfa468db969cf89870737c1a0dc740a3f962fbe0bb76"} Sep 30 20:32:23 crc kubenswrapper[4919]: I0930 20:32:23.664143 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40d00a75-5b72-4341-8618-1abb614b53cb" path="/var/lib/kubelet/pods/40d00a75-5b72-4341-8618-1abb614b53cb/volumes" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.094907 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sp45k" event={"ID":"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4","Type":"ContainerStarted","Data":"a2bd35679f61d726a7c65c57e3521c963095ad4b26c9ffc459f4eeae3093670c"} Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.122348 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-sp45k" podStartSLOduration=2.21919024 podStartE2EDuration="41.122329508s" podCreationTimestamp="2025-09-30 20:31:43 +0000 UTC" firstStartedPulling="2025-09-30 20:31:44.1936071 +0000 UTC m=+1089.309640227" lastFinishedPulling="2025-09-30 20:32:23.096746378 +0000 UTC m=+1128.212779495" observedRunningTime="2025-09-30 20:32:24.119575149 +0000 UTC m=+1129.235608276" watchObservedRunningTime="2025-09-30 20:32:24.122329508 +0000 UTC m=+1129.238362645" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.239751 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-755c6889fd-n5x5p" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:49012->10.217.0.162:9311: read: connection reset by peer" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.240099 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-755c6889fd-n5x5p" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:49028->10.217.0.162:9311: read: connection reset by peer" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.708166 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.864958 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-combined-ca-bundle\") pod \"5d59e33b-2daa-46c5-9022-2b3509e817e8\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.865018 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swkxm\" (UniqueName: \"kubernetes.io/projected/5d59e33b-2daa-46c5-9022-2b3509e817e8-kube-api-access-swkxm\") pod \"5d59e33b-2daa-46c5-9022-2b3509e817e8\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.865106 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data\") pod \"5d59e33b-2daa-46c5-9022-2b3509e817e8\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.865150 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d59e33b-2daa-46c5-9022-2b3509e817e8-logs\") pod \"5d59e33b-2daa-46c5-9022-2b3509e817e8\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.865193 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data-custom\") pod \"5d59e33b-2daa-46c5-9022-2b3509e817e8\" (UID: \"5d59e33b-2daa-46c5-9022-2b3509e817e8\") " Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.868024 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d59e33b-2daa-46c5-9022-2b3509e817e8-logs" (OuterVolumeSpecName: "logs") pod "5d59e33b-2daa-46c5-9022-2b3509e817e8" (UID: "5d59e33b-2daa-46c5-9022-2b3509e817e8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.872366 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5d59e33b-2daa-46c5-9022-2b3509e817e8" (UID: "5d59e33b-2daa-46c5-9022-2b3509e817e8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.877988 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d59e33b-2daa-46c5-9022-2b3509e817e8-kube-api-access-swkxm" (OuterVolumeSpecName: "kube-api-access-swkxm") pod "5d59e33b-2daa-46c5-9022-2b3509e817e8" (UID: "5d59e33b-2daa-46c5-9022-2b3509e817e8"). InnerVolumeSpecName "kube-api-access-swkxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.897941 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d59e33b-2daa-46c5-9022-2b3509e817e8" (UID: "5d59e33b-2daa-46c5-9022-2b3509e817e8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.921182 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data" (OuterVolumeSpecName: "config-data") pod "5d59e33b-2daa-46c5-9022-2b3509e817e8" (UID: "5d59e33b-2daa-46c5-9022-2b3509e817e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.966794 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.966828 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swkxm\" (UniqueName: \"kubernetes.io/projected/5d59e33b-2daa-46c5-9022-2b3509e817e8-kube-api-access-swkxm\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.966840 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.966848 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5d59e33b-2daa-46c5-9022-2b3509e817e8-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:24 crc kubenswrapper[4919]: I0930 20:32:24.966858 4919 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d59e33b-2daa-46c5-9022-2b3509e817e8-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.106590 4919 generic.go:334] "Generic (PLEG): container finished" podID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerID="f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298" exitCode=0 Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.106646 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-755c6889fd-n5x5p" event={"ID":"5d59e33b-2daa-46c5-9022-2b3509e817e8","Type":"ContainerDied","Data":"f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298"} Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.106697 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-755c6889fd-n5x5p" event={"ID":"5d59e33b-2daa-46c5-9022-2b3509e817e8","Type":"ContainerDied","Data":"0b85a10b57e7409e84168a6392c7161e860a11e9b7511979cd2d15bec95622fe"} Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.106692 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-755c6889fd-n5x5p" Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.106772 4919 scope.go:117] "RemoveContainer" containerID="f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298" Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.158200 4919 scope.go:117] "RemoveContainer" containerID="f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767" Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.163601 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-755c6889fd-n5x5p"] Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.169778 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-755c6889fd-n5x5p"] Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.183032 4919 scope.go:117] "RemoveContainer" containerID="f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298" Sep 30 20:32:25 crc kubenswrapper[4919]: E0930 20:32:25.183663 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298\": container with ID starting with f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298 not found: ID does not exist" containerID="f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298" Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.183711 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298"} err="failed to get container status \"f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298\": rpc error: code = NotFound desc = could not find container \"f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298\": container with ID starting with f33bb7acc58d2747fe7f5a55fcf8bcf45d1e430e90ade74295526caa80cc4298 not found: ID does not exist" Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.183734 4919 scope.go:117] "RemoveContainer" containerID="f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767" Sep 30 20:32:25 crc kubenswrapper[4919]: E0930 20:32:25.184555 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767\": container with ID starting with f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767 not found: ID does not exist" containerID="f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767" Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.184598 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767"} err="failed to get container status \"f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767\": rpc error: code = NotFound desc = could not find container \"f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767\": container with ID starting with f72a69d86c0f40f36d89d3510bf0b79ae0cd875ce4c2d2f108d0b2e0a1c14767 not found: ID does not exist" Sep 30 20:32:25 crc kubenswrapper[4919]: I0930 20:32:25.645446 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" path="/var/lib/kubelet/pods/5d59e33b-2daa-46c5-9022-2b3509e817e8/volumes" Sep 30 20:32:26 crc kubenswrapper[4919]: I0930 20:32:26.063322 4919 
patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:32:26 crc kubenswrapper[4919]: I0930 20:32:26.063418 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:32:26 crc kubenswrapper[4919]: I0930 20:32:26.063492 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:32:26 crc kubenswrapper[4919]: I0930 20:32:26.064656 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"266a47211086852ebceb8347506c7f46056112506f6f3e1b6a4412456d9a3ed6"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:32:26 crc kubenswrapper[4919]: I0930 20:32:26.064757 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://266a47211086852ebceb8347506c7f46056112506f6f3e1b6a4412456d9a3ed6" gracePeriod=600 Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.146672 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="266a47211086852ebceb8347506c7f46056112506f6f3e1b6a4412456d9a3ed6" exitCode=0 Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.146956 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"266a47211086852ebceb8347506c7f46056112506f6f3e1b6a4412456d9a3ed6"} Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.146983 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"50b0f3b522dc60e1e18fadcde7bd6a100190635e5277992da10bb56412db1f04"} Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.147000 4919 scope.go:117] "RemoveContainer" containerID="7330287e87c2c36810a07467a4c3caedfb96311988e76c64c3eedda691a5f9f5" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.992623 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-79b575f787-8gljl"] Sep 30 20:32:27 crc kubenswrapper[4919]: E0930 20:32:27.993312 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api-log" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.993329 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api-log" Sep 30 20:32:27 crc kubenswrapper[4919]: E0930 20:32:27.993366 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" 
containerName="barbican-api" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.993374 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.993830 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api-log" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.993901 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d59e33b-2daa-46c5-9022-2b3509e817e8" containerName="barbican-api" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.995125 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.997372 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.997688 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 30 20:32:27 crc kubenswrapper[4919]: I0930 20:32:27.997826 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.021424 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-79b575f787-8gljl"] Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.130792 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-config-data\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.130868 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tqfb\" (UniqueName: \"kubernetes.io/projected/371dfa65-4310-40a1-b28c-74f5ec1071fd-kube-api-access-2tqfb\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.130948 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-combined-ca-bundle\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.130973 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-public-tls-certs\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.131002 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/371dfa65-4310-40a1-b28c-74f5ec1071fd-etc-swift\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: 
I0930 20:32:28.131022 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/371dfa65-4310-40a1-b28c-74f5ec1071fd-run-httpd\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.131235 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/371dfa65-4310-40a1-b28c-74f5ec1071fd-log-httpd\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.131284 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-internal-tls-certs\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.233042 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-config-data\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.233326 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tqfb\" (UniqueName: \"kubernetes.io/projected/371dfa65-4310-40a1-b28c-74f5ec1071fd-kube-api-access-2tqfb\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.233402 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-combined-ca-bundle\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.233432 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-public-tls-certs\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.233451 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/371dfa65-4310-40a1-b28c-74f5ec1071fd-etc-swift\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.233480 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/371dfa65-4310-40a1-b28c-74f5ec1071fd-run-httpd\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 
20:32:28.233525 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/371dfa65-4310-40a1-b28c-74f5ec1071fd-log-httpd\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.233543 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-internal-tls-certs\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.236586 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/371dfa65-4310-40a1-b28c-74f5ec1071fd-run-httpd\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.236827 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/371dfa65-4310-40a1-b28c-74f5ec1071fd-log-httpd\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.240473 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-public-tls-certs\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.240484 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-internal-tls-certs\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.241790 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-combined-ca-bundle\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.248104 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/371dfa65-4310-40a1-b28c-74f5ec1071fd-config-data\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.255406 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/371dfa65-4310-40a1-b28c-74f5ec1071fd-etc-swift\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.260409 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tqfb\" (UniqueName: 
\"kubernetes.io/projected/371dfa65-4310-40a1-b28c-74f5ec1071fd-kube-api-access-2tqfb\") pod \"swift-proxy-79b575f787-8gljl\" (UID: \"371dfa65-4310-40a1-b28c-74f5ec1071fd\") " pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.317170 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.420698 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.420963 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerName="glance-log" containerID="cri-o://09366126cc9eafe4b8bf9f0d6429bfd6bfb77fbc8184b11ca9cd980b0f43692b" gracePeriod=30 Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.421046 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerName="glance-httpd" containerID="cri-o://d525eed0dbc2683a937baa5a4d92c94dc0dafc867dbcd9bb401b42bc1d42ef09" gracePeriod=30 Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.912244 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.912891 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="ceilometer-central-agent" containerID="cri-o://f9991d6556f27a212ed09be36de35ca08657cbdb6c5561c77e5b46195010ca45" gracePeriod=30 Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.913461 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="proxy-httpd" containerID="cri-o://5ced391738ae37ac2cd1d5601fe2be75763a2c5a2e8f464816599363ecc3004c" gracePeriod=30 Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.913514 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="sg-core" containerID="cri-o://37970fcfd99d592da7cfb1be66d2d1c2d2d38199212f2c2a827c3f1f0ef928ee" gracePeriod=30 Sep 30 20:32:28 crc kubenswrapper[4919]: I0930 20:32:28.913546 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="ceilometer-notification-agent" containerID="cri-o://1a02f2298e590e7a2260a1b9d9f7eb504a220d69bbdc92058fe567b704966d54" gracePeriod=30 Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.027001 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.166:3000/\": read tcp 10.217.0.2:42046->10.217.0.166:3000: read: connection reset by peer" Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.182340 4919 generic.go:334] "Generic (PLEG): container finished" podID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerID="09366126cc9eafe4b8bf9f0d6429bfd6bfb77fbc8184b11ca9cd980b0f43692b" exitCode=143 Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.182479 4919 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b4f4f-31af-4f32-a7b9-2af615779e95","Type":"ContainerDied","Data":"09366126cc9eafe4b8bf9f0d6429bfd6bfb77fbc8184b11ca9cd980b0f43692b"} Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.191004 4919 generic.go:334] "Generic (PLEG): container finished" podID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerID="5ced391738ae37ac2cd1d5601fe2be75763a2c5a2e8f464816599363ecc3004c" exitCode=0 Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.191043 4919 generic.go:334] "Generic (PLEG): container finished" podID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerID="37970fcfd99d592da7cfb1be66d2d1c2d2d38199212f2c2a827c3f1f0ef928ee" exitCode=2 Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.191101 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerDied","Data":"5ced391738ae37ac2cd1d5601fe2be75763a2c5a2e8f464816599363ecc3004c"} Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.191139 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerDied","Data":"37970fcfd99d592da7cfb1be66d2d1c2d2d38199212f2c2a827c3f1f0ef928ee"} Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.193053 4919 generic.go:334] "Generic (PLEG): container finished" podID="8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" containerID="a2bd35679f61d726a7c65c57e3521c963095ad4b26c9ffc459f4eeae3093670c" exitCode=0 Sep 30 20:32:29 crc kubenswrapper[4919]: I0930 20:32:29.193096 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sp45k" event={"ID":"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4","Type":"ContainerDied","Data":"a2bd35679f61d726a7c65c57e3521c963095ad4b26c9ffc459f4eeae3093670c"} Sep 30 20:32:30 crc kubenswrapper[4919]: I0930 20:32:30.214728 4919 generic.go:334] "Generic (PLEG): container finished" podID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerID="f9991d6556f27a212ed09be36de35ca08657cbdb6c5561c77e5b46195010ca45" exitCode=0 Sep 30 20:32:30 crc kubenswrapper[4919]: I0930 20:32:30.214819 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerDied","Data":"f9991d6556f27a212ed09be36de35ca08657cbdb6c5561c77e5b46195010ca45"} Sep 30 20:32:31 crc kubenswrapper[4919]: I0930 20:32:31.732674 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:32:31 crc kubenswrapper[4919]: I0930 20:32:31.733304 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerName="glance-log" containerID="cri-o://93c16f287f1951d83dcf001b24d242503e0bba26feb852174ac0f9f76c3dd18d" gracePeriod=30 Sep 30 20:32:31 crc kubenswrapper[4919]: I0930 20:32:31.733434 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerName="glance-httpd" containerID="cri-o://cd695b0b60075119c80f53c247d13812b6754bdc0aba11733eae3260ba76972e" gracePeriod=30 Sep 30 20:32:32 crc kubenswrapper[4919]: I0930 20:32:32.237701 4919 generic.go:334] "Generic (PLEG): container finished" podID="69e2e475-a270-4817-b14b-fbb6d78abfa3" 
containerID="93c16f287f1951d83dcf001b24d242503e0bba26feb852174ac0f9f76c3dd18d" exitCode=143 Sep 30 20:32:32 crc kubenswrapper[4919]: I0930 20:32:32.237772 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69e2e475-a270-4817-b14b-fbb6d78abfa3","Type":"ContainerDied","Data":"93c16f287f1951d83dcf001b24d242503e0bba26feb852174ac0f9f76c3dd18d"} Sep 30 20:32:32 crc kubenswrapper[4919]: I0930 20:32:32.241269 4919 generic.go:334] "Generic (PLEG): container finished" podID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerID="d525eed0dbc2683a937baa5a4d92c94dc0dafc867dbcd9bb401b42bc1d42ef09" exitCode=0 Sep 30 20:32:32 crc kubenswrapper[4919]: I0930 20:32:32.241327 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b4f4f-31af-4f32-a7b9-2af615779e95","Type":"ContainerDied","Data":"d525eed0dbc2683a937baa5a4d92c94dc0dafc867dbcd9bb401b42bc1d42ef09"} Sep 30 20:32:32 crc kubenswrapper[4919]: I0930 20:32:32.243710 4919 generic.go:334] "Generic (PLEG): container finished" podID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerID="1a02f2298e590e7a2260a1b9d9f7eb504a220d69bbdc92058fe567b704966d54" exitCode=0 Sep 30 20:32:32 crc kubenswrapper[4919]: I0930 20:32:32.243742 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerDied","Data":"1a02f2298e590e7a2260a1b9d9f7eb504a220d69bbdc92058fe567b704966d54"} Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.829003 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sp45k" Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.939952 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-db-sync-config-data\") pod \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.940021 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wlgl\" (UniqueName: \"kubernetes.io/projected/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-kube-api-access-8wlgl\") pod \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.940053 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-combined-ca-bundle\") pod \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.940070 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-scripts\") pod \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.940104 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-etc-machine-id\") pod \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.940135 4919 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-config-data\") pod \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\" (UID: \"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4\") " Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.945786 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" (UID: "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.946080 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" (UID: "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.949151 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-scripts" (OuterVolumeSpecName: "scripts") pod "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" (UID: "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.955498 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-kube-api-access-8wlgl" (OuterVolumeSpecName: "kube-api-access-8wlgl") pod "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" (UID: "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4"). InnerVolumeSpecName "kube-api-access-8wlgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:33 crc kubenswrapper[4919]: I0930 20:32:33.983676 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" (UID: "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.031292 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.034473 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-config-data" (OuterVolumeSpecName: "config-data") pod "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" (UID: "8d660eb4-7718-4b2b-a834-9b7d0d2b64a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.043456 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wlgl\" (UniqueName: \"kubernetes.io/projected/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-kube-api-access-8wlgl\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.043492 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.043501 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.043509 4919 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.043518 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.043527 4919 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.144316 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-config-data\") pod \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.144893 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-run-httpd\") pod \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.144956 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-log-httpd\") pod \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.145028 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-scripts\") pod \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.145071 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkggz\" (UniqueName: \"kubernetes.io/projected/9d317adc-ca3d-45f8-b364-680ae2c9fe18-kube-api-access-wkggz\") pod \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.145129 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-sg-core-conf-yaml\") pod \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.145176 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-combined-ca-bundle\") pod \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\" (UID: \"9d317adc-ca3d-45f8-b364-680ae2c9fe18\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.145866 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9d317adc-ca3d-45f8-b364-680ae2c9fe18" (UID: "9d317adc-ca3d-45f8-b364-680ae2c9fe18"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.146152 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9d317adc-ca3d-45f8-b364-680ae2c9fe18" (UID: "9d317adc-ca3d-45f8-b364-680ae2c9fe18"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.146790 4919 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.146845 4919 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d317adc-ca3d-45f8-b364-680ae2c9fe18-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.148910 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.149430 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-scripts" (OuterVolumeSpecName: "scripts") pod "9d317adc-ca3d-45f8-b364-680ae2c9fe18" (UID: "9d317adc-ca3d-45f8-b364-680ae2c9fe18"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.154472 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d317adc-ca3d-45f8-b364-680ae2c9fe18-kube-api-access-wkggz" (OuterVolumeSpecName: "kube-api-access-wkggz") pod "9d317adc-ca3d-45f8-b364-680ae2c9fe18" (UID: "9d317adc-ca3d-45f8-b364-680ae2c9fe18"). InnerVolumeSpecName "kube-api-access-wkggz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.198054 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9d317adc-ca3d-45f8-b364-680ae2c9fe18" (UID: "9d317adc-ca3d-45f8-b364-680ae2c9fe18"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.249200 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.249257 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkggz\" (UniqueName: \"kubernetes.io/projected/9d317adc-ca3d-45f8-b364-680ae2c9fe18-kube-api-access-wkggz\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.249271 4919 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.281919 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sp45k" event={"ID":"8d660eb4-7718-4b2b-a834-9b7d0d2b64a4","Type":"ContainerDied","Data":"33302023c854ad5dafd4f296452531bf4c5aa8889c17e30eefc7eaedf8ff8af4"} Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.281954 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33302023c854ad5dafd4f296452531bf4c5aa8889c17e30eefc7eaedf8ff8af4" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.282021 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sp45k" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.289474 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d317adc-ca3d-45f8-b364-680ae2c9fe18" (UID: "9d317adc-ca3d-45f8-b364-680ae2c9fe18"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.292964 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e","Type":"ContainerStarted","Data":"8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364"} Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.296441 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.296470 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b4f4f-31af-4f32-a7b9-2af615779e95","Type":"ContainerDied","Data":"42f6e79802e75751b94a5f806192e4f9c05f57d03326f34cbd894b0f592ae9a5"} Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.298050 4919 scope.go:117] "RemoveContainer" containerID="d525eed0dbc2683a937baa5a4d92c94dc0dafc867dbcd9bb401b42bc1d42ef09" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.299745 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9d317adc-ca3d-45f8-b364-680ae2c9fe18","Type":"ContainerDied","Data":"a8803a8584f57b3a435c76f07555f8b3ea10162d268c52def2db4daa1673277a"} Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.299807 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.307622 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.623114642 podStartE2EDuration="12.307600101s" podCreationTimestamp="2025-09-30 20:32:22 +0000 UTC" firstStartedPulling="2025-09-30 20:32:23.004103001 +0000 UTC m=+1128.120136128" lastFinishedPulling="2025-09-30 20:32:33.68858846 +0000 UTC m=+1138.804621587" observedRunningTime="2025-09-30 20:32:34.307465017 +0000 UTC m=+1139.423498144" watchObservedRunningTime="2025-09-30 20:32:34.307600101 +0000 UTC m=+1139.423633228" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.323105 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-config-data" (OuterVolumeSpecName: "config-data") pod "9d317adc-ca3d-45f8-b364-680ae2c9fe18" (UID: "9d317adc-ca3d-45f8-b364-680ae2c9fe18"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.329549 4919 scope.go:117] "RemoveContainer" containerID="09366126cc9eafe4b8bf9f0d6429bfd6bfb77fbc8184b11ca9cd980b0f43692b" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350024 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-config-data\") pod \"792b4f4f-31af-4f32-a7b9-2af615779e95\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350156 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"792b4f4f-31af-4f32-a7b9-2af615779e95\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350183 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-combined-ca-bundle\") pod \"792b4f4f-31af-4f32-a7b9-2af615779e95\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350248 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-logs\") pod \"792b4f4f-31af-4f32-a7b9-2af615779e95\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350279 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-httpd-run\") pod \"792b4f4f-31af-4f32-a7b9-2af615779e95\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350313 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-public-tls-certs\") pod \"792b4f4f-31af-4f32-a7b9-2af615779e95\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350429 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhq5v\" (UniqueName: 
\"kubernetes.io/projected/792b4f4f-31af-4f32-a7b9-2af615779e95-kube-api-access-zhq5v\") pod \"792b4f4f-31af-4f32-a7b9-2af615779e95\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350461 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-scripts\") pod \"792b4f4f-31af-4f32-a7b9-2af615779e95\" (UID: \"792b4f4f-31af-4f32-a7b9-2af615779e95\") " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350890 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.350912 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d317adc-ca3d-45f8-b364-680ae2c9fe18-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.352790 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "792b4f4f-31af-4f32-a7b9-2af615779e95" (UID: "792b4f4f-31af-4f32-a7b9-2af615779e95"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.352893 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-logs" (OuterVolumeSpecName: "logs") pod "792b4f4f-31af-4f32-a7b9-2af615779e95" (UID: "792b4f4f-31af-4f32-a7b9-2af615779e95"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.354747 4919 scope.go:117] "RemoveContainer" containerID="5ced391738ae37ac2cd1d5601fe2be75763a2c5a2e8f464816599363ecc3004c" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.357177 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/792b4f4f-31af-4f32-a7b9-2af615779e95-kube-api-access-zhq5v" (OuterVolumeSpecName: "kube-api-access-zhq5v") pod "792b4f4f-31af-4f32-a7b9-2af615779e95" (UID: "792b4f4f-31af-4f32-a7b9-2af615779e95"). InnerVolumeSpecName "kube-api-access-zhq5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.357784 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-scripts" (OuterVolumeSpecName: "scripts") pod "792b4f4f-31af-4f32-a7b9-2af615779e95" (UID: "792b4f4f-31af-4f32-a7b9-2af615779e95"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.366866 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "792b4f4f-31af-4f32-a7b9-2af615779e95" (UID: "792b4f4f-31af-4f32-a7b9-2af615779e95"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.370876 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-79b575f787-8gljl"] Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.386011 4919 scope.go:117] "RemoveContainer" containerID="37970fcfd99d592da7cfb1be66d2d1c2d2d38199212f2c2a827c3f1f0ef928ee" Sep 30 20:32:34 crc kubenswrapper[4919]: W0930 20:32:34.390747 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod371dfa65_4310_40a1_b28c_74f5ec1071fd.slice/crio-849ba257d32ca7c2760d14db84e8dbfd8235254e23ceb796761270158864feed WatchSource:0}: Error finding container 849ba257d32ca7c2760d14db84e8dbfd8235254e23ceb796761270158864feed: Status 404 returned error can't find the container with id 849ba257d32ca7c2760d14db84e8dbfd8235254e23ceb796761270158864feed Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.398900 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "792b4f4f-31af-4f32-a7b9-2af615779e95" (UID: "792b4f4f-31af-4f32-a7b9-2af615779e95"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.410423 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-config-data" (OuterVolumeSpecName: "config-data") pod "792b4f4f-31af-4f32-a7b9-2af615779e95" (UID: "792b4f4f-31af-4f32-a7b9-2af615779e95"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.411226 4919 scope.go:117] "RemoveContainer" containerID="1a02f2298e590e7a2260a1b9d9f7eb504a220d69bbdc92058fe567b704966d54" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.432056 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "792b4f4f-31af-4f32-a7b9-2af615779e95" (UID: "792b4f4f-31af-4f32-a7b9-2af615779e95"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.433607 4919 scope.go:117] "RemoveContainer" containerID="f9991d6556f27a212ed09be36de35ca08657cbdb6c5561c77e5b46195010ca45" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.453835 4919 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.453878 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.453891 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.453903 4919 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b4f4f-31af-4f32-a7b9-2af615779e95-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.453917 4919 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.453928 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhq5v\" (UniqueName: \"kubernetes.io/projected/792b4f4f-31af-4f32-a7b9-2af615779e95-kube-api-access-zhq5v\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.453940 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.453950 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b4f4f-31af-4f32-a7b9-2af615779e95-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.482556 4919 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.555886 4919 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.742614 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.756818 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.774737 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.783626 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.809690 4919 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/glance-default-external-api-0"] Sep 30 20:32:34 crc kubenswrapper[4919]: E0930 20:32:34.810265 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="ceilometer-notification-agent" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.810360 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="ceilometer-notification-agent" Sep 30 20:32:34 crc kubenswrapper[4919]: E0930 20:32:34.810428 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="sg-core" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.810488 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="sg-core" Sep 30 20:32:34 crc kubenswrapper[4919]: E0930 20:32:34.810546 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerName="glance-log" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.810597 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerName="glance-log" Sep 30 20:32:34 crc kubenswrapper[4919]: E0930 20:32:34.810655 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="proxy-httpd" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.810708 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="proxy-httpd" Sep 30 20:32:34 crc kubenswrapper[4919]: E0930 20:32:34.810758 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" containerName="cinder-db-sync" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.810811 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" containerName="cinder-db-sync" Sep 30 20:32:34 crc kubenswrapper[4919]: E0930 20:32:34.810879 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="ceilometer-central-agent" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.810929 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="ceilometer-central-agent" Sep 30 20:32:34 crc kubenswrapper[4919]: E0930 20:32:34.810996 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerName="glance-httpd" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.811053 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerName="glance-httpd" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.811313 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="proxy-httpd" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.811386 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="ceilometer-notification-agent" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.811450 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerName="glance-httpd" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.811508 4919 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" containerName="cinder-db-sync" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.811561 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" containerName="glance-log" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.811613 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="ceilometer-central-agent" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.811682 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" containerName="sg-core" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.812895 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.817743 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.817768 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.826007 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.846511 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.848535 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.855362 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.855605 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.861358 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.966410 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.966623 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-config-data\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.966701 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.966798 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.966870 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-logs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.966941 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr22f\" (UniqueName: \"kubernetes.io/projected/830bc004-4464-408b-9696-1c69dcbcc793-kube-api-access-tr22f\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967003 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-log-httpd\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967075 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-run-httpd\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967160 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967418 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-scripts\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967495 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967572 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-scripts\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967633 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-config-data\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967706 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:34 crc kubenswrapper[4919]: I0930 20:32:34.967781 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b79bs\" (UniqueName: \"kubernetes.io/projected/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-kube-api-access-b79bs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.069988 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070199 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-config-data\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070329 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070424 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070511 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-logs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070592 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr22f\" (UniqueName: \"kubernetes.io/projected/830bc004-4464-408b-9696-1c69dcbcc793-kube-api-access-tr22f\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070655 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-log-httpd\") pod \"ceilometer-0\" (UID: 
\"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070727 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-run-httpd\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070809 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070896 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-scripts\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.070963 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.071039 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-scripts\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.071109 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-config-data\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.071173 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.071269 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b79bs\" (UniqueName: \"kubernetes.io/projected/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-kube-api-access-b79bs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.072491 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-run-httpd\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.078960 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.086904 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.094620 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.099394 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-log-httpd\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.099522 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-logs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.101866 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-config-data\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.102774 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-scripts\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.102846 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.104899 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.106499 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-scripts\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 
20:32:35.108711 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b79bs\" (UniqueName: \"kubernetes.io/projected/bf52a250-e8fb-4bd1-a25e-2852fbfb0804-kube-api-access-b79bs\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.110377 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.113278 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.158702 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-config-data\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.166336 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.176555 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr22f\" (UniqueName: \"kubernetes.io/projected/830bc004-4464-408b-9696-1c69dcbcc793-kube-api-access-tr22f\") pod \"ceilometer-0\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.176841 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-zdqcx" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.177244 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.177468 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.177599 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.180838 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.198316 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.210270 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-mj85v"] Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.211811 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.224705 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-mj85v"] Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.283776 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.283866 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.283886 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-scripts\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.283900 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.283919 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr2xg\" (UniqueName: \"kubernetes.io/projected/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-kube-api-access-cr2xg\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.283993 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.288050 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"bf52a250-e8fb-4bd1-a25e-2852fbfb0804\") " pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.319653 4919 generic.go:334] "Generic (PLEG): container finished" podID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerID="cd695b0b60075119c80f53c247d13812b6754bdc0aba11733eae3260ba76972e" exitCode=0 Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.319743 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69e2e475-a270-4817-b14b-fbb6d78abfa3","Type":"ContainerDied","Data":"cd695b0b60075119c80f53c247d13812b6754bdc0aba11733eae3260ba76972e"} Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.331134 
4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-79b575f787-8gljl" event={"ID":"371dfa65-4310-40a1-b28c-74f5ec1071fd","Type":"ContainerStarted","Data":"7daa36e8f2f77c3825f2c84ca134fd4ee87c981e419fdb9ff37a1de54832b381"} Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.331370 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-79b575f787-8gljl" event={"ID":"371dfa65-4310-40a1-b28c-74f5ec1071fd","Type":"ContainerStarted","Data":"389d90c28c5c73bfd470672f0a3a5142c6dec2dd411769ae1c67da8b86fa6322"} Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.331382 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-79b575f787-8gljl" event={"ID":"371dfa65-4310-40a1-b28c-74f5ec1071fd","Type":"ContainerStarted","Data":"849ba257d32ca7c2760d14db84e8dbfd8235254e23ceb796761270158864feed"} Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.331429 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.331459 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.340179 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.341589 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.347261 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.372067 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.379229 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-79b575f787-8gljl" podStartSLOduration=8.37919464 podStartE2EDuration="8.37919464s" podCreationTimestamp="2025-09-30 20:32:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:35.361028349 +0000 UTC m=+1140.477061486" watchObservedRunningTime="2025-09-30 20:32:35.37919464 +0000 UTC m=+1140.495227767" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388128 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388429 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-config\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388660 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: 
\"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388678 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388692 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-scripts\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388716 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr2xg\" (UniqueName: \"kubernetes.io/projected/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-kube-api-access-cr2xg\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388743 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g47kt\" (UniqueName: \"kubernetes.io/projected/c6714658-0275-4c06-952f-d84e5121bd9d-kube-api-access-g47kt\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388800 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388824 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-svc\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.388861 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.389137 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.389166 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 
30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.393523 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.396478 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.396523 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.397660 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-scripts\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.397832 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.445879 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.450542 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr2xg\" (UniqueName: \"kubernetes.io/projected/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-kube-api-access-cr2xg\") pod \"cinder-scheduler-0\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.490328 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491195 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-nb\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491237 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8dqf\" (UniqueName: \"kubernetes.io/projected/bf2c3535-58c7-4a25-aaa3-2050c302c729-kube-api-access-n8dqf\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491262 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491281 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data-custom\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491296 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491315 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-config\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491350 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2c3535-58c7-4a25-aaa3-2050c302c729-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc 
kubenswrapper[4919]: I0930 20:32:35.491364 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491430 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-scripts\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491471 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g47kt\" (UniqueName: \"kubernetes.io/projected/c6714658-0275-4c06-952f-d84e5121bd9d-kube-api-access-g47kt\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491556 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c3535-58c7-4a25-aaa3-2050c302c729-logs\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491625 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-svc\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.491718 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.492357 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-sb\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.492841 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-swift-storage-0\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.493445 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-config\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.497229 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-svc\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.497604 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.520855 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g47kt\" (UniqueName: \"kubernetes.io/projected/c6714658-0275-4c06-952f-d84e5121bd9d-kube-api-access-g47kt\") pod \"dnsmasq-dns-5784cf869f-mj85v\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.557098 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.592572 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c3535-58c7-4a25-aaa3-2050c302c729-logs\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.592683 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8dqf\" (UniqueName: \"kubernetes.io/projected/bf2c3535-58c7-4a25-aaa3-2050c302c729-kube-api-access-n8dqf\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.592707 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data-custom\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.592722 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.592747 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2c3535-58c7-4a25-aaa3-2050c302c729-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.592761 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.592782 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-scripts\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.593712 4919 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c3535-58c7-4a25-aaa3-2050c302c729-logs\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.598953 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.599131 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2c3535-58c7-4a25-aaa3-2050c302c729-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.599819 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-scripts\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.615287 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.616116 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data-custom\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.620588 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.625878 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8dqf\" (UniqueName: \"kubernetes.io/projected/bf2c3535-58c7-4a25-aaa3-2050c302c729-kube-api-access-n8dqf\") pod \"cinder-api-0\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.650672 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="792b4f4f-31af-4f32-a7b9-2af615779e95" path="/var/lib/kubelet/pods/792b4f4f-31af-4f32-a7b9-2af615779e95/volumes" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.651469 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d317adc-ca3d-45f8-b364-680ae2c9fe18" path="/var/lib/kubelet/pods/9d317adc-ca3d-45f8-b364-680ae2c9fe18/volumes" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.680861 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.805041 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.816765 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898349 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-httpd-run\") pod \"69e2e475-a270-4817-b14b-fbb6d78abfa3\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898403 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-config-data\") pod \"69e2e475-a270-4817-b14b-fbb6d78abfa3\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898449 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjk29\" (UniqueName: \"kubernetes.io/projected/69e2e475-a270-4817-b14b-fbb6d78abfa3-kube-api-access-qjk29\") pod \"69e2e475-a270-4817-b14b-fbb6d78abfa3\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898542 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"69e2e475-a270-4817-b14b-fbb6d78abfa3\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898580 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-scripts\") pod \"69e2e475-a270-4817-b14b-fbb6d78abfa3\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898632 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-internal-tls-certs\") pod \"69e2e475-a270-4817-b14b-fbb6d78abfa3\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898663 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "69e2e475-a270-4817-b14b-fbb6d78abfa3" (UID: "69e2e475-a270-4817-b14b-fbb6d78abfa3"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898673 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-combined-ca-bundle\") pod \"69e2e475-a270-4817-b14b-fbb6d78abfa3\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.898776 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-logs\") pod \"69e2e475-a270-4817-b14b-fbb6d78abfa3\" (UID: \"69e2e475-a270-4817-b14b-fbb6d78abfa3\") " Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.899251 4919 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.899492 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-logs" (OuterVolumeSpecName: "logs") pod "69e2e475-a270-4817-b14b-fbb6d78abfa3" (UID: "69e2e475-a270-4817-b14b-fbb6d78abfa3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.907479 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "69e2e475-a270-4817-b14b-fbb6d78abfa3" (UID: "69e2e475-a270-4817-b14b-fbb6d78abfa3"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.922459 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69e2e475-a270-4817-b14b-fbb6d78abfa3-kube-api-access-qjk29" (OuterVolumeSpecName: "kube-api-access-qjk29") pod "69e2e475-a270-4817-b14b-fbb6d78abfa3" (UID: "69e2e475-a270-4817-b14b-fbb6d78abfa3"). InnerVolumeSpecName "kube-api-access-qjk29". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.933344 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-scripts" (OuterVolumeSpecName: "scripts") pod "69e2e475-a270-4817-b14b-fbb6d78abfa3" (UID: "69e2e475-a270-4817-b14b-fbb6d78abfa3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.961338 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69e2e475-a270-4817-b14b-fbb6d78abfa3" (UID: "69e2e475-a270-4817-b14b-fbb6d78abfa3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:35 crc kubenswrapper[4919]: I0930 20:32:35.991078 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "69e2e475-a270-4817-b14b-fbb6d78abfa3" (UID: "69e2e475-a270-4817-b14b-fbb6d78abfa3"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.004596 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjk29\" (UniqueName: \"kubernetes.io/projected/69e2e475-a270-4817-b14b-fbb6d78abfa3-kube-api-access-qjk29\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.004654 4919 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.004697 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.004713 4919 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.004724 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.004736 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69e2e475-a270-4817-b14b-fbb6d78abfa3-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.024395 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-config-data" (OuterVolumeSpecName: "config-data") pod "69e2e475-a270-4817-b14b-fbb6d78abfa3" (UID: "69e2e475-a270-4817-b14b-fbb6d78abfa3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.039876 4919 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Sep 30 20:32:36 crc kubenswrapper[4919]: W0930 20:32:36.076128 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf52a250_e8fb_4bd1_a25e_2852fbfb0804.slice/crio-72efe012df60ded7f3bade022889cd0a78b7a9cb65a4d43345ae0482f0cb3a1f WatchSource:0}: Error finding container 72efe012df60ded7f3bade022889cd0a78b7a9cb65a4d43345ae0482f0cb3a1f: Status 404 returned error can't find the container with id 72efe012df60ded7f3bade022889cd0a78b7a9cb65a4d43345ae0482f0cb3a1f Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.086666 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.107287 4919 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.107324 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69e2e475-a270-4817-b14b-fbb6d78abfa3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.184799 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.213699 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-mj85v"] Sep 30 20:32:36 crc kubenswrapper[4919]: W0930 20:32:36.224953 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6714658_0275_4c06_952f_d84e5121bd9d.slice/crio-5ae98cee30cfa8541b7e466602d796c5f4ba599af5dd9dc15d32c51f94c99835 WatchSource:0}: Error finding container 5ae98cee30cfa8541b7e466602d796c5f4ba599af5dd9dc15d32c51f94c99835: Status 404 returned error can't find the container with id 5ae98cee30cfa8541b7e466602d796c5f4ba599af5dd9dc15d32c51f94c99835 Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.371429 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:32:36 crc kubenswrapper[4919]: W0930 20:32:36.387378 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf2c3535_58c7_4a25_aaa3_2050c302c729.slice/crio-9d540a0466463bec270aacfa412253cf7480d6f85429e4350b42d259dc128fb3 WatchSource:0}: Error finding container 9d540a0466463bec270aacfa412253cf7480d6f85429e4350b42d259dc128fb3: Status 404 returned error can't find the container with id 9d540a0466463bec270aacfa412253cf7480d6f85429e4350b42d259dc128fb3 Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.402397 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf52a250-e8fb-4bd1-a25e-2852fbfb0804","Type":"ContainerStarted","Data":"72efe012df60ded7f3bade022889cd0a78b7a9cb65a4d43345ae0482f0cb3a1f"} Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.403624 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"9247840a-b887-4d3d-b147-6f2d8dbb6fbe","Type":"ContainerStarted","Data":"0925ec3779a86614cb279f8bc10c7c21931ac76d1d904556545cf466b3a48f20"} Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.405196 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerStarted","Data":"218db87c25b8d4292b025cedfc2c474f49c17ec178e0d59e2fa2339458cc9da2"} Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.410694 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"69e2e475-a270-4817-b14b-fbb6d78abfa3","Type":"ContainerDied","Data":"fe08c8ac4b28d099463ea79311a6b2a10d4a883fde98fbb7cfb6223b4b40c495"} Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.410726 4919 scope.go:117] "RemoveContainer" containerID="cd695b0b60075119c80f53c247d13812b6754bdc0aba11733eae3260ba76972e" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.410831 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.418474 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" event={"ID":"c6714658-0275-4c06-952f-d84e5121bd9d","Type":"ContainerStarted","Data":"5ae98cee30cfa8541b7e466602d796c5f4ba599af5dd9dc15d32c51f94c99835"} Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.446865 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.476699 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.502265 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:32:36 crc kubenswrapper[4919]: E0930 20:32:36.502764 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerName="glance-httpd" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.502782 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerName="glance-httpd" Sep 30 20:32:36 crc kubenswrapper[4919]: E0930 20:32:36.502832 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerName="glance-log" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.502842 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerName="glance-log" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.503077 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerName="glance-httpd" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.503105 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" containerName="glance-log" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.503388 4919 scope.go:117] "RemoveContainer" containerID="93c16f287f1951d83dcf001b24d242503e0bba26feb852174ac0f9f76c3dd18d" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.509474 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.516525 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.516716 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.522572 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.617411 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.617495 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.617531 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llvwc\" (UniqueName: \"kubernetes.io/projected/a78d2c04-3656-417b-ace2-cba6a7e90060-kube-api-access-llvwc\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.617555 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a78d2c04-3656-417b-ace2-cba6a7e90060-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.617576 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.617596 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.617626 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.617651 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a78d2c04-3656-417b-ace2-cba6a7e90060-logs\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.720975 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a78d2c04-3656-417b-ace2-cba6a7e90060-logs\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.721268 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.721370 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.721403 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llvwc\" (UniqueName: \"kubernetes.io/projected/a78d2c04-3656-417b-ace2-cba6a7e90060-kube-api-access-llvwc\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.721425 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a78d2c04-3656-417b-ace2-cba6a7e90060-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.721451 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.721471 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.721511 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.722497 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a78d2c04-3656-417b-ace2-cba6a7e90060-logs\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.722772 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.723500 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a78d2c04-3656-417b-ace2-cba6a7e90060-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.728549 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.728899 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.743337 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.743673 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a78d2c04-3656-417b-ace2-cba6a7e90060-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.754179 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llvwc\" (UniqueName: \"kubernetes.io/projected/a78d2c04-3656-417b-ace2-cba6a7e90060-kube-api-access-llvwc\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.778649 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"a78d2c04-3656-417b-ace2-cba6a7e90060\") " pod="openstack/glance-default-internal-api-0" Sep 30 20:32:36 crc kubenswrapper[4919]: I0930 20:32:36.849945 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:37 crc kubenswrapper[4919]: I0930 20:32:37.451540 4919 generic.go:334] "Generic (PLEG): container finished" podID="c6714658-0275-4c06-952f-d84e5121bd9d" containerID="5aea0a1c2c28d0e035cf58716118087ea692894503a86dd217f6d61d06ef79b0" exitCode=0 Sep 30 20:32:37 crc kubenswrapper[4919]: I0930 20:32:37.451617 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" event={"ID":"c6714658-0275-4c06-952f-d84e5121bd9d","Type":"ContainerDied","Data":"5aea0a1c2c28d0e035cf58716118087ea692894503a86dd217f6d61d06ef79b0"} Sep 30 20:32:37 crc kubenswrapper[4919]: I0930 20:32:37.458904 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf52a250-e8fb-4bd1-a25e-2852fbfb0804","Type":"ContainerStarted","Data":"e8d981b7dbd99c4a87f0a4782af4be982426b09737ef005917501863d712dd6f"} Sep 30 20:32:37 crc kubenswrapper[4919]: I0930 20:32:37.460985 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerStarted","Data":"cf1b0adec0568e6b1fdb72c7df2e7d499b2b4460b23afc188bdbb3dab312725c"} Sep 30 20:32:37 crc kubenswrapper[4919]: I0930 20:32:37.469269 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bf2c3535-58c7-4a25-aaa3-2050c302c729","Type":"ContainerStarted","Data":"9d540a0466463bec270aacfa412253cf7480d6f85429e4350b42d259dc128fb3"} Sep 30 20:32:37 crc kubenswrapper[4919]: I0930 20:32:37.652240 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69e2e475-a270-4817-b14b-fbb6d78abfa3" path="/var/lib/kubelet/pods/69e2e475-a270-4817-b14b-fbb6d78abfa3/volumes" Sep 30 20:32:37 crc kubenswrapper[4919]: I0930 20:32:37.653487 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 30 20:32:37 crc kubenswrapper[4919]: W0930 20:32:37.660759 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda78d2c04_3656_417b_ace2_cba6a7e90060.slice/crio-d5aa50ec8f3e7a9d3f8089f03a4448e8242cdb563d7e4d321f0d9714edcd1219 WatchSource:0}: Error finding container d5aa50ec8f3e7a9d3f8089f03a4448e8242cdb563d7e4d321f0d9714edcd1219: Status 404 returned error can't find the container with id d5aa50ec8f3e7a9d3f8089f03a4448e8242cdb563d7e4d321f0d9714edcd1219 Sep 30 20:32:37 crc kubenswrapper[4919]: I0930 20:32:37.896388 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:32:38 crc kubenswrapper[4919]: I0930 20:32:38.119123 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:32:38 crc kubenswrapper[4919]: I0930 20:32:38.489764 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a78d2c04-3656-417b-ace2-cba6a7e90060","Type":"ContainerStarted","Data":"d37e5e4c3adf808fb982148d7a249e2c571b8b6261566bc0dc39766951b3a34c"} Sep 30 20:32:38 crc kubenswrapper[4919]: I0930 20:32:38.490120 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a78d2c04-3656-417b-ace2-cba6a7e90060","Type":"ContainerStarted","Data":"d5aa50ec8f3e7a9d3f8089f03a4448e8242cdb563d7e4d321f0d9714edcd1219"} Sep 30 20:32:38 crc kubenswrapper[4919]: I0930 20:32:38.500411 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-scheduler-0" event={"ID":"9247840a-b887-4d3d-b147-6f2d8dbb6fbe","Type":"ContainerStarted","Data":"fa7b6f096dab69e52bd8ebc8291f3e91f26dfb5dea38b715c8c7377a316abdc2"} Sep 30 20:32:38 crc kubenswrapper[4919]: I0930 20:32:38.513171 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerStarted","Data":"1b43f029ee709a2a65c1eedfa14e9a41f60be52d43646e0d62b388321d3b3187"} Sep 30 20:32:38 crc kubenswrapper[4919]: I0930 20:32:38.516899 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bf52a250-e8fb-4bd1-a25e-2852fbfb0804","Type":"ContainerStarted","Data":"d17504107bcfc75f48244daf95c102c2e611035f36bcede18a7a71555131b4b5"} Sep 30 20:32:38 crc kubenswrapper[4919]: I0930 20:32:38.568056 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bf2c3535-58c7-4a25-aaa3-2050c302c729","Type":"ContainerStarted","Data":"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0"} Sep 30 20:32:38 crc kubenswrapper[4919]: I0930 20:32:38.595343 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.595324596 podStartE2EDuration="4.595324596s" podCreationTimestamp="2025-09-30 20:32:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:38.54318115 +0000 UTC m=+1143.659214277" watchObservedRunningTime="2025-09-30 20:32:38.595324596 +0000 UTC m=+1143.711357723" Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.578534 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerStarted","Data":"53f6654535ad83457c51707bd47410a0885cdebcf7a93a230c7b9a2010dc61cd"} Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.583161 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" event={"ID":"c6714658-0275-4c06-952f-d84e5121bd9d","Type":"ContainerStarted","Data":"ed46622a300dba9708a1f29da97c0bbd2aff1b4b843944464929611b34488b87"} Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.583469 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.586112 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a78d2c04-3656-417b-ace2-cba6a7e90060","Type":"ContainerStarted","Data":"e8c63300f428d7260fc833bf70dbab653fd1f6c48847cee846af7f76bfc4643d"} Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.589259 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bf2c3535-58c7-4a25-aaa3-2050c302c729","Type":"ContainerStarted","Data":"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929"} Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.589345 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerName="cinder-api-log" containerID="cri-o://c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0" gracePeriod=30 Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.589416 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/cinder-api-0" Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.589445 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerName="cinder-api" containerID="cri-o://45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929" gracePeriod=30 Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.592384 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9247840a-b887-4d3d-b147-6f2d8dbb6fbe","Type":"ContainerStarted","Data":"9f9172a2ee5ac9dc841b2b37e69f44413440df9ea298d3c266e323b82720a9f9"} Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.618403 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" podStartSLOduration=4.618385932 podStartE2EDuration="4.618385932s" podCreationTimestamp="2025-09-30 20:32:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:39.617330032 +0000 UTC m=+1144.733363159" watchObservedRunningTime="2025-09-30 20:32:39.618385932 +0000 UTC m=+1144.734419059" Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.640457 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.640434604 podStartE2EDuration="3.640434604s" podCreationTimestamp="2025-09-30 20:32:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:39.639362344 +0000 UTC m=+1144.755395491" watchObservedRunningTime="2025-09-30 20:32:39.640434604 +0000 UTC m=+1144.756467731" Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.710897 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.487998148 podStartE2EDuration="4.710881275s" podCreationTimestamp="2025-09-30 20:32:35 +0000 UTC" firstStartedPulling="2025-09-30 20:32:36.212035112 +0000 UTC m=+1141.328068239" lastFinishedPulling="2025-09-30 20:32:37.434918239 +0000 UTC m=+1142.550951366" observedRunningTime="2025-09-30 20:32:39.654176029 +0000 UTC m=+1144.770209176" watchObservedRunningTime="2025-09-30 20:32:39.710881275 +0000 UTC m=+1144.826914402" Sep 30 20:32:39 crc kubenswrapper[4919]: I0930 20:32:39.730763 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.730745614 podStartE2EDuration="4.730745614s" podCreationTimestamp="2025-09-30 20:32:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:39.727782549 +0000 UTC m=+1144.843815676" watchObservedRunningTime="2025-09-30 20:32:39.730745614 +0000 UTC m=+1144.846778741" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.208064 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.313992 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data\") pod \"bf2c3535-58c7-4a25-aaa3-2050c302c729\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.314069 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8dqf\" (UniqueName: \"kubernetes.io/projected/bf2c3535-58c7-4a25-aaa3-2050c302c729-kube-api-access-n8dqf\") pod \"bf2c3535-58c7-4a25-aaa3-2050c302c729\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.314178 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-combined-ca-bundle\") pod \"bf2c3535-58c7-4a25-aaa3-2050c302c729\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.314201 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-scripts\") pod \"bf2c3535-58c7-4a25-aaa3-2050c302c729\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.314244 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2c3535-58c7-4a25-aaa3-2050c302c729-etc-machine-id\") pod \"bf2c3535-58c7-4a25-aaa3-2050c302c729\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.314271 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data-custom\") pod \"bf2c3535-58c7-4a25-aaa3-2050c302c729\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.314291 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c3535-58c7-4a25-aaa3-2050c302c729-logs\") pod \"bf2c3535-58c7-4a25-aaa3-2050c302c729\" (UID: \"bf2c3535-58c7-4a25-aaa3-2050c302c729\") " Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.315056 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf2c3535-58c7-4a25-aaa3-2050c302c729-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bf2c3535-58c7-4a25-aaa3-2050c302c729" (UID: "bf2c3535-58c7-4a25-aaa3-2050c302c729"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.315279 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf2c3535-58c7-4a25-aaa3-2050c302c729-logs" (OuterVolumeSpecName: "logs") pod "bf2c3535-58c7-4a25-aaa3-2050c302c729" (UID: "bf2c3535-58c7-4a25-aaa3-2050c302c729"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.319569 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bf2c3535-58c7-4a25-aaa3-2050c302c729" (UID: "bf2c3535-58c7-4a25-aaa3-2050c302c729"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.319721 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-scripts" (OuterVolumeSpecName: "scripts") pod "bf2c3535-58c7-4a25-aaa3-2050c302c729" (UID: "bf2c3535-58c7-4a25-aaa3-2050c302c729"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.319922 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf2c3535-58c7-4a25-aaa3-2050c302c729-kube-api-access-n8dqf" (OuterVolumeSpecName: "kube-api-access-n8dqf") pod "bf2c3535-58c7-4a25-aaa3-2050c302c729" (UID: "bf2c3535-58c7-4a25-aaa3-2050c302c729"). InnerVolumeSpecName "kube-api-access-n8dqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.350842 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf2c3535-58c7-4a25-aaa3-2050c302c729" (UID: "bf2c3535-58c7-4a25-aaa3-2050c302c729"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.378914 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data" (OuterVolumeSpecName: "config-data") pod "bf2c3535-58c7-4a25-aaa3-2050c302c729" (UID: "bf2c3535-58c7-4a25-aaa3-2050c302c729"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.417002 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.417452 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.417483 4919 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf2c3535-58c7-4a25-aaa3-2050c302c729-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.417501 4919 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.417517 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf2c3535-58c7-4a25-aaa3-2050c302c729-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.417529 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf2c3535-58c7-4a25-aaa3-2050c302c729-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.417541 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8dqf\" (UniqueName: \"kubernetes.io/projected/bf2c3535-58c7-4a25-aaa3-2050c302c729-kube-api-access-n8dqf\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.468018 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.499371 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.607640 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerStarted","Data":"c68cd5f8cfa139223a59a999d0eca3a1b46126cfb5a9f5b3d2960a79d3bc97e5"} Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.607800 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="ceilometer-central-agent" containerID="cri-o://cf1b0adec0568e6b1fdb72c7df2e7d499b2b4460b23afc188bdbb3dab312725c" gracePeriod=30 Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.608049 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.608295 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="sg-core" containerID="cri-o://53f6654535ad83457c51707bd47410a0885cdebcf7a93a230c7b9a2010dc61cd" gracePeriod=30 Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.608313 4919 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="proxy-httpd" containerID="cri-o://c68cd5f8cfa139223a59a999d0eca3a1b46126cfb5a9f5b3d2960a79d3bc97e5" gracePeriod=30 Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.608349 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="ceilometer-notification-agent" containerID="cri-o://1b43f029ee709a2a65c1eedfa14e9a41f60be52d43646e0d62b388321d3b3187" gracePeriod=30 Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.611041 4919 generic.go:334] "Generic (PLEG): container finished" podID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerID="45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929" exitCode=0 Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.611062 4919 generic.go:334] "Generic (PLEG): container finished" podID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerID="c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0" exitCode=143 Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.611153 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.611161 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bf2c3535-58c7-4a25-aaa3-2050c302c729","Type":"ContainerDied","Data":"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929"} Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.611204 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bf2c3535-58c7-4a25-aaa3-2050c302c729","Type":"ContainerDied","Data":"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0"} Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.611227 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"bf2c3535-58c7-4a25-aaa3-2050c302c729","Type":"ContainerDied","Data":"9d540a0466463bec270aacfa412253cf7480d6f85429e4350b42d259dc128fb3"} Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.611240 4919 scope.go:117] "RemoveContainer" containerID="45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.635937 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.174265258 podStartE2EDuration="6.635917751s" podCreationTimestamp="2025-09-30 20:32:34 +0000 UTC" firstStartedPulling="2025-09-30 20:32:35.82281899 +0000 UTC m=+1140.938852117" lastFinishedPulling="2025-09-30 20:32:40.284471483 +0000 UTC m=+1145.400504610" observedRunningTime="2025-09-30 20:32:40.631279118 +0000 UTC m=+1145.747312245" watchObservedRunningTime="2025-09-30 20:32:40.635917751 +0000 UTC m=+1145.751950878" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.700370 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.723005 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.726561 4919 scope.go:117] "RemoveContainer" containerID="c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.734276 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 
30 20:32:40 crc kubenswrapper[4919]: E0930 20:32:40.734740 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerName="cinder-api-log" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.734766 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerName="cinder-api-log" Sep 30 20:32:40 crc kubenswrapper[4919]: E0930 20:32:40.734811 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerName="cinder-api" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.734818 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerName="cinder-api" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.735008 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerName="cinder-api" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.735044 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" containerName="cinder-api-log" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.736195 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.739336 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.739569 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.740473 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.743375 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.811479 4919 scope.go:117] "RemoveContainer" containerID="45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929" Sep 30 20:32:40 crc kubenswrapper[4919]: E0930 20:32:40.812155 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929\": container with ID starting with 45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929 not found: ID does not exist" containerID="45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.812205 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929"} err="failed to get container status \"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929\": rpc error: code = NotFound desc = could not find container \"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929\": container with ID starting with 45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929 not found: ID does not exist" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.812240 4919 scope.go:117] "RemoveContainer" containerID="c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0" Sep 30 20:32:40 crc kubenswrapper[4919]: E0930 20:32:40.812703 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0\": container with ID starting with c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0 not found: ID does not exist" containerID="c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.812729 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0"} err="failed to get container status \"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0\": rpc error: code = NotFound desc = could not find container \"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0\": container with ID starting with c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0 not found: ID does not exist" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.812744 4919 scope.go:117] "RemoveContainer" containerID="45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.813605 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929"} err="failed to get container status \"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929\": rpc error: code = NotFound desc = could not find container \"45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929\": container with ID starting with 45a250274291a24ad22e68e1975f519e9ca1646be70c73705550666c66de8929 not found: ID does not exist" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.813628 4919 scope.go:117] "RemoveContainer" containerID="c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.813946 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0"} err="failed to get container status \"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0\": rpc error: code = NotFound desc = could not find container \"c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0\": container with ID starting with c26d26282a3721e4897ef1815a0f862765c9bdaf7b16adb457e848e3d8c6e5b0 not found: ID does not exist" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850016 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2326593c-681c-435f-85bc-126dfddc85a4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850241 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-scripts\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850394 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-config-data-custom\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " 
pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850510 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850559 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2326593c-681c-435f-85bc-126dfddc85a4-logs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850593 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850763 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk6nk\" (UniqueName: \"kubernetes.io/projected/2326593c-681c-435f-85bc-126dfddc85a4-kube-api-access-pk6nk\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850804 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-config-data\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.850848 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.952736 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-scripts\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.952811 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-config-data-custom\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.952853 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.952871 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/2326593c-681c-435f-85bc-126dfddc85a4-logs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.952889 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.952945 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk6nk\" (UniqueName: \"kubernetes.io/projected/2326593c-681c-435f-85bc-126dfddc85a4-kube-api-access-pk6nk\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.952975 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-config-data\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.953024 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.953091 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2326593c-681c-435f-85bc-126dfddc85a4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.953174 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2326593c-681c-435f-85bc-126dfddc85a4-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.954661 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2326593c-681c-435f-85bc-126dfddc85a4-logs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.958572 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-scripts\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.959909 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.960391 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.969990 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk6nk\" (UniqueName: \"kubernetes.io/projected/2326593c-681c-435f-85bc-126dfddc85a4-kube-api-access-pk6nk\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.973849 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-config-data-custom\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.974625 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-config-data\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:40 crc kubenswrapper[4919]: I0930 20:32:40.975895 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2326593c-681c-435f-85bc-126dfddc85a4-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2326593c-681c-435f-85bc-126dfddc85a4\") " pod="openstack/cinder-api-0" Sep 30 20:32:41 crc kubenswrapper[4919]: I0930 20:32:41.110593 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 30 20:32:41 crc kubenswrapper[4919]: I0930 20:32:41.567710 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 30 20:32:41 crc kubenswrapper[4919]: W0930 20:32:41.575355 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2326593c_681c_435f_85bc_126dfddc85a4.slice/crio-6caf4d507bfa2b133da6c895ebaa1b9dc0fb818f903c40f836b0d07b1c993365 WatchSource:0}: Error finding container 6caf4d507bfa2b133da6c895ebaa1b9dc0fb818f903c40f836b0d07b1c993365: Status 404 returned error can't find the container with id 6caf4d507bfa2b133da6c895ebaa1b9dc0fb818f903c40f836b0d07b1c993365 Sep 30 20:32:41 crc kubenswrapper[4919]: I0930 20:32:41.622960 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2326593c-681c-435f-85bc-126dfddc85a4","Type":"ContainerStarted","Data":"6caf4d507bfa2b133da6c895ebaa1b9dc0fb818f903c40f836b0d07b1c993365"} Sep 30 20:32:41 crc kubenswrapper[4919]: I0930 20:32:41.630824 4919 generic.go:334] "Generic (PLEG): container finished" podID="830bc004-4464-408b-9696-1c69dcbcc793" containerID="53f6654535ad83457c51707bd47410a0885cdebcf7a93a230c7b9a2010dc61cd" exitCode=2 Sep 30 20:32:41 crc kubenswrapper[4919]: I0930 20:32:41.631145 4919 generic.go:334] "Generic (PLEG): container finished" podID="830bc004-4464-408b-9696-1c69dcbcc793" containerID="1b43f029ee709a2a65c1eedfa14e9a41f60be52d43646e0d62b388321d3b3187" exitCode=0 Sep 30 20:32:41 crc kubenswrapper[4919]: I0930 20:32:41.630931 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerDied","Data":"53f6654535ad83457c51707bd47410a0885cdebcf7a93a230c7b9a2010dc61cd"} Sep 30 20:32:41 crc kubenswrapper[4919]: I0930 20:32:41.631261 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerDied","Data":"1b43f029ee709a2a65c1eedfa14e9a41f60be52d43646e0d62b388321d3b3187"} Sep 30 20:32:41 crc kubenswrapper[4919]: I0930 20:32:41.651899 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf2c3535-58c7-4a25-aaa3-2050c302c729" path="/var/lib/kubelet/pods/bf2c3535-58c7-4a25-aaa3-2050c302c729/volumes" Sep 30 20:32:42 crc kubenswrapper[4919]: I0930 20:32:42.655581 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2326593c-681c-435f-85bc-126dfddc85a4","Type":"ContainerStarted","Data":"286fa4d4aafd64fb105269a8b8ed07f6d636cd197d2648ba515d8ea251db2279"} Sep 30 20:32:42 crc kubenswrapper[4919]: I0930 20:32:42.659779 4919 generic.go:334] "Generic (PLEG): container finished" podID="830bc004-4464-408b-9696-1c69dcbcc793" containerID="cf1b0adec0568e6b1fdb72c7df2e7d499b2b4460b23afc188bdbb3dab312725c" exitCode=0 Sep 30 20:32:42 crc kubenswrapper[4919]: I0930 20:32:42.659829 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerDied","Data":"cf1b0adec0568e6b1fdb72c7df2e7d499b2b4460b23afc188bdbb3dab312725c"} Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.214804 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-db95ddc59-4ffw5" Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.275389 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7975dfb48-wvh2l"] Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.275660 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7975dfb48-wvh2l" podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerName="neutron-api" containerID="cri-o://f5d1214c9514f8da649a24e16ddb47e8ea8c9a384bdc26619aa32cd2c5a47859" gracePeriod=30 Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.275768 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7975dfb48-wvh2l" podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerName="neutron-httpd" containerID="cri-o://8298497be040e62e3183ad776599c51924b3293af2ebff9fb704621cffdb427d" gracePeriod=30 Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.330632 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.330678 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-79b575f787-8gljl" Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.690494 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2326593c-681c-435f-85bc-126dfddc85a4","Type":"ContainerStarted","Data":"3ee2a7742b2b2c9978809e6d6345cf341c5964fdd4441b4799f4e9c2108b9f61"} Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.691842 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.707361 4919 generic.go:334] "Generic (PLEG): container finished" 
podID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerID="8298497be040e62e3183ad776599c51924b3293af2ebff9fb704621cffdb427d" exitCode=0 Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.707416 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7975dfb48-wvh2l" event={"ID":"b97d953a-480a-41b8-bbc5-b9a87b3a20cb","Type":"ContainerDied","Data":"8298497be040e62e3183ad776599c51924b3293af2ebff9fb704621cffdb427d"} Sep 30 20:32:43 crc kubenswrapper[4919]: I0930 20:32:43.720365 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.72034287 podStartE2EDuration="3.72034287s" podCreationTimestamp="2025-09-30 20:32:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:43.714708678 +0000 UTC m=+1148.830741805" watchObservedRunningTime="2025-09-30 20:32:43.72034287 +0000 UTC m=+1148.836375997" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.446653 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.446972 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.489567 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.496957 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.560403 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.659415 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-22zsz"] Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.659634 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" podUID="32fe776c-73a1-43fc-90c4-75c1f56c9966" containerName="dnsmasq-dns" containerID="cri-o://6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd" gracePeriod=10 Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.733073 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.733110 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.821986 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 20:32:45 crc kubenswrapper[4919]: I0930 20:32:45.856321 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.300015 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.394842 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-config\") pod \"32fe776c-73a1-43fc-90c4-75c1f56c9966\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.394975 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s98rj\" (UniqueName: \"kubernetes.io/projected/32fe776c-73a1-43fc-90c4-75c1f56c9966-kube-api-access-s98rj\") pod \"32fe776c-73a1-43fc-90c4-75c1f56c9966\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.395068 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-svc\") pod \"32fe776c-73a1-43fc-90c4-75c1f56c9966\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.395096 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-nb\") pod \"32fe776c-73a1-43fc-90c4-75c1f56c9966\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.395131 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-swift-storage-0\") pod \"32fe776c-73a1-43fc-90c4-75c1f56c9966\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.395245 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-sb\") pod \"32fe776c-73a1-43fc-90c4-75c1f56c9966\" (UID: \"32fe776c-73a1-43fc-90c4-75c1f56c9966\") " Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.420185 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32fe776c-73a1-43fc-90c4-75c1f56c9966-kube-api-access-s98rj" (OuterVolumeSpecName: "kube-api-access-s98rj") pod "32fe776c-73a1-43fc-90c4-75c1f56c9966" (UID: "32fe776c-73a1-43fc-90c4-75c1f56c9966"). InnerVolumeSpecName "kube-api-access-s98rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.448189 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "32fe776c-73a1-43fc-90c4-75c1f56c9966" (UID: "32fe776c-73a1-43fc-90c4-75c1f56c9966"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.474501 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "32fe776c-73a1-43fc-90c4-75c1f56c9966" (UID: "32fe776c-73a1-43fc-90c4-75c1f56c9966"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.478133 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "32fe776c-73a1-43fc-90c4-75c1f56c9966" (UID: "32fe776c-73a1-43fc-90c4-75c1f56c9966"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.490730 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-config" (OuterVolumeSpecName: "config") pod "32fe776c-73a1-43fc-90c4-75c1f56c9966" (UID: "32fe776c-73a1-43fc-90c4-75c1f56c9966"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.498496 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.498524 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.498536 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.498546 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.498556 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s98rj\" (UniqueName: \"kubernetes.io/projected/32fe776c-73a1-43fc-90c4-75c1f56c9966-kube-api-access-s98rj\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.507920 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "32fe776c-73a1-43fc-90c4-75c1f56c9966" (UID: "32fe776c-73a1-43fc-90c4-75c1f56c9966"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.600593 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/32fe776c-73a1-43fc-90c4-75c1f56c9966-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.743185 4919 generic.go:334] "Generic (PLEG): container finished" podID="32fe776c-73a1-43fc-90c4-75c1f56c9966" containerID="6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd" exitCode=0 Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.743436 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerName="cinder-scheduler" containerID="cri-o://fa7b6f096dab69e52bd8ebc8291f3e91f26dfb5dea38b715c8c7377a316abdc2" gracePeriod=30 Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.743800 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" event={"ID":"32fe776c-73a1-43fc-90c4-75c1f56c9966","Type":"ContainerDied","Data":"6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd"} Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.743838 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" event={"ID":"32fe776c-73a1-43fc-90c4-75c1f56c9966","Type":"ContainerDied","Data":"def45f6efd4ec7f67b5f72272d2049e2329d3698d61aba4b2df346b7565ea135"} Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.743860 4919 scope.go:117] "RemoveContainer" containerID="6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.743880 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerName="probe" containerID="cri-o://9f9172a2ee5ac9dc841b2b37e69f44413440df9ea298d3c266e323b82720a9f9" gracePeriod=30 Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.744048 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75c8ddd69c-22zsz" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.763486 4919 scope.go:117] "RemoveContainer" containerID="d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.792683 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-22zsz"] Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.813889 4919 scope.go:117] "RemoveContainer" containerID="6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd" Sep 30 20:32:46 crc kubenswrapper[4919]: E0930 20:32:46.814434 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd\": container with ID starting with 6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd not found: ID does not exist" containerID="6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.814489 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd"} err="failed to get container status \"6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd\": rpc error: code = NotFound desc = could not find container \"6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd\": container with ID starting with 6ea3a262ceed2b4478faf8f6fe39d352e280f11ed5e2064d2ed46b06bf68facd not found: ID does not exist" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.814525 4919 scope.go:117] "RemoveContainer" containerID="d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1" Sep 30 20:32:46 crc kubenswrapper[4919]: E0930 20:32:46.815861 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1\": container with ID starting with d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1 not found: ID does not exist" containerID="d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.815894 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1"} err="failed to get container status \"d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1\": rpc error: code = NotFound desc = could not find container \"d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1\": container with ID starting with d93fbc801fd0a3bc0daa9b975799fc73270349cd9304aa518eb484cdfaf6aaf1 not found: ID does not exist" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.820369 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75c8ddd69c-22zsz"] Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.850642 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.850703 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.896720 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:46 crc kubenswrapper[4919]: I0930 20:32:46.919878 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:47 crc kubenswrapper[4919]: I0930 20:32:47.644410 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32fe776c-73a1-43fc-90c4-75c1f56c9966" path="/var/lib/kubelet/pods/32fe776c-73a1-43fc-90c4-75c1f56c9966/volumes" Sep 30 20:32:47 crc kubenswrapper[4919]: I0930 20:32:47.762468 4919 generic.go:334] "Generic (PLEG): container finished" podID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerID="9f9172a2ee5ac9dc841b2b37e69f44413440df9ea298d3c266e323b82720a9f9" exitCode=0 Sep 30 20:32:47 crc kubenswrapper[4919]: I0930 20:32:47.762533 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9247840a-b887-4d3d-b147-6f2d8dbb6fbe","Type":"ContainerDied","Data":"9f9172a2ee5ac9dc841b2b37e69f44413440df9ea298d3c266e323b82720a9f9"} Sep 30 20:32:47 crc kubenswrapper[4919]: I0930 20:32:47.764473 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:47 crc kubenswrapper[4919]: I0930 20:32:47.764495 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:48 crc kubenswrapper[4919]: I0930 20:32:48.081682 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 20:32:48 crc kubenswrapper[4919]: I0930 20:32:48.081808 4919 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 30 20:32:48 crc kubenswrapper[4919]: I0930 20:32:48.087902 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 30 20:32:49 crc kubenswrapper[4919]: I0930 20:32:49.721588 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:49 crc kubenswrapper[4919]: I0930 20:32:49.763731 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 30 20:32:49 crc kubenswrapper[4919]: I0930 20:32:49.785962 4919 generic.go:334] "Generic (PLEG): container finished" podID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerID="f5d1214c9514f8da649a24e16ddb47e8ea8c9a384bdc26619aa32cd2c5a47859" exitCode=0 Sep 30 20:32:49 crc kubenswrapper[4919]: I0930 20:32:49.786021 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7975dfb48-wvh2l" event={"ID":"b97d953a-480a-41b8-bbc5-b9a87b3a20cb","Type":"ContainerDied","Data":"f5d1214c9514f8da649a24e16ddb47e8ea8c9a384bdc26619aa32cd2c5a47859"} Sep 30 20:32:49 crc kubenswrapper[4919]: I0930 20:32:49.796502 4919 generic.go:334] "Generic (PLEG): container finished" podID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerID="fa7b6f096dab69e52bd8ebc8291f3e91f26dfb5dea38b715c8c7377a316abdc2" exitCode=0 Sep 30 20:32:49 crc kubenswrapper[4919]: I0930 20:32:49.797337 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9247840a-b887-4d3d-b147-6f2d8dbb6fbe","Type":"ContainerDied","Data":"fa7b6f096dab69e52bd8ebc8291f3e91f26dfb5dea38b715c8c7377a316abdc2"} Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.609006 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.690803 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-combined-ca-bundle\") pod \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.691028 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data-custom\") pod \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.691185 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-etc-machine-id\") pod \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.691299 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr2xg\" (UniqueName: \"kubernetes.io/projected/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-kube-api-access-cr2xg\") pod \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.691389 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data\") pod \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.691502 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-scripts\") pod \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\" (UID: \"9247840a-b887-4d3d-b147-6f2d8dbb6fbe\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.695685 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9247840a-b887-4d3d-b147-6f2d8dbb6fbe" (UID: "9247840a-b887-4d3d-b147-6f2d8dbb6fbe"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.701071 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-scripts" (OuterVolumeSpecName: "scripts") pod "9247840a-b887-4d3d-b147-6f2d8dbb6fbe" (UID: "9247840a-b887-4d3d-b147-6f2d8dbb6fbe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.701289 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-kube-api-access-cr2xg" (OuterVolumeSpecName: "kube-api-access-cr2xg") pod "9247840a-b887-4d3d-b147-6f2d8dbb6fbe" (UID: "9247840a-b887-4d3d-b147-6f2d8dbb6fbe"). InnerVolumeSpecName "kube-api-access-cr2xg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.716902 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9247840a-b887-4d3d-b147-6f2d8dbb6fbe" (UID: "9247840a-b887-4d3d-b147-6f2d8dbb6fbe"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.777407 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9247840a-b887-4d3d-b147-6f2d8dbb6fbe" (UID: "9247840a-b887-4d3d-b147-6f2d8dbb6fbe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.794277 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.794316 4919 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.794330 4919 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.794342 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr2xg\" (UniqueName: \"kubernetes.io/projected/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-kube-api-access-cr2xg\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.794356 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.810607 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7975dfb48-wvh2l" event={"ID":"b97d953a-480a-41b8-bbc5-b9a87b3a20cb","Type":"ContainerDied","Data":"04758ee6b14c13dc53a38e633efb79bbc288b37d74bd9d7e88af24abb7f9a288"} Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.810656 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04758ee6b14c13dc53a38e633efb79bbc288b37d74bd9d7e88af24abb7f9a288" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.813484 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.814000 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9247840a-b887-4d3d-b147-6f2d8dbb6fbe","Type":"ContainerDied","Data":"0925ec3779a86614cb279f8bc10c7c21931ac76d1d904556545cf466b3a48f20"} Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.814039 4919 scope.go:117] "RemoveContainer" containerID="9f9172a2ee5ac9dc841b2b37e69f44413440df9ea298d3c266e323b82720a9f9" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.884336 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.892264 4919 scope.go:117] "RemoveContainer" containerID="fa7b6f096dab69e52bd8ebc8291f3e91f26dfb5dea38b715c8c7377a316abdc2" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.897720 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data" (OuterVolumeSpecName: "config-data") pod "9247840a-b887-4d3d-b147-6f2d8dbb6fbe" (UID: "9247840a-b887-4d3d-b147-6f2d8dbb6fbe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.997127 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-httpd-config\") pod \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.997268 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-combined-ca-bundle\") pod \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.997331 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-ovndb-tls-certs\") pod \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.997356 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-config\") pod \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.997380 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9kn9\" (UniqueName: \"kubernetes.io/projected/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-kube-api-access-k9kn9\") pod \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\" (UID: \"b97d953a-480a-41b8-bbc5-b9a87b3a20cb\") " Sep 30 20:32:50 crc kubenswrapper[4919]: I0930 20:32:50.997831 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9247840a-b887-4d3d-b147-6f2d8dbb6fbe-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.003343 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-kube-api-access-k9kn9" (OuterVolumeSpecName: "kube-api-access-k9kn9") pod "b97d953a-480a-41b8-bbc5-b9a87b3a20cb" (UID: "b97d953a-480a-41b8-bbc5-b9a87b3a20cb"). InnerVolumeSpecName "kube-api-access-k9kn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.004764 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "b97d953a-480a-41b8-bbc5-b9a87b3a20cb" (UID: "b97d953a-480a-41b8-bbc5-b9a87b3a20cb"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.066169 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b97d953a-480a-41b8-bbc5-b9a87b3a20cb" (UID: "b97d953a-480a-41b8-bbc5-b9a87b3a20cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.081485 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-config" (OuterVolumeSpecName: "config") pod "b97d953a-480a-41b8-bbc5-b9a87b3a20cb" (UID: "b97d953a-480a-41b8-bbc5-b9a87b3a20cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.099625 4919 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.099665 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.099676 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.099684 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9kn9\" (UniqueName: \"kubernetes.io/projected/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-kube-api-access-k9kn9\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.104553 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "b97d953a-480a-41b8-bbc5-b9a87b3a20cb" (UID: "b97d953a-480a-41b8-bbc5-b9a87b3a20cb"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.152839 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.160099 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.171876 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:51 crc kubenswrapper[4919]: E0930 20:32:51.172204 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerName="neutron-httpd" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172234 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerName="neutron-httpd" Sep 30 20:32:51 crc kubenswrapper[4919]: E0930 20:32:51.172250 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerName="cinder-scheduler" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172257 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerName="cinder-scheduler" Sep 30 20:32:51 crc kubenswrapper[4919]: E0930 20:32:51.172278 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fe776c-73a1-43fc-90c4-75c1f56c9966" containerName="dnsmasq-dns" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172284 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fe776c-73a1-43fc-90c4-75c1f56c9966" containerName="dnsmasq-dns" Sep 30 20:32:51 crc kubenswrapper[4919]: E0930 20:32:51.172295 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerName="neutron-api" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172301 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerName="neutron-api" Sep 30 20:32:51 crc kubenswrapper[4919]: E0930 20:32:51.172318 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerName="probe" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172325 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerName="probe" Sep 30 20:32:51 crc kubenswrapper[4919]: E0930 20:32:51.172336 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32fe776c-73a1-43fc-90c4-75c1f56c9966" containerName="init" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172343 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="32fe776c-73a1-43fc-90c4-75c1f56c9966" containerName="init" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172494 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerName="cinder-scheduler" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172506 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" containerName="probe" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172519 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="32fe776c-73a1-43fc-90c4-75c1f56c9966" containerName="dnsmasq-dns" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172526 4919 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerName="neutron-api" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.172539 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" containerName="neutron-httpd" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.174910 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.176637 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.189176 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.201356 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0207f9a6-d481-4499-81a2-7e9bbaba9000-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.201396 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.201419 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-config-data\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.201453 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-scripts\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.201542 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr2z5\" (UniqueName: \"kubernetes.io/projected/0207f9a6-d481-4499-81a2-7e9bbaba9000-kube-api-access-nr2z5\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.201609 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.201755 4919 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97d953a-480a-41b8-bbc5-b9a87b3a20cb-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.303288 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr2z5\" (UniqueName: 
\"kubernetes.io/projected/0207f9a6-d481-4499-81a2-7e9bbaba9000-kube-api-access-nr2z5\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.303385 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.303437 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0207f9a6-d481-4499-81a2-7e9bbaba9000-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.303463 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.303491 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-config-data\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.303532 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-scripts\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.303749 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0207f9a6-d481-4499-81a2-7e9bbaba9000-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.310794 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.316293 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-config-data\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.321944 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.322080 4919 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0207f9a6-d481-4499-81a2-7e9bbaba9000-scripts\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.326713 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr2z5\" (UniqueName: \"kubernetes.io/projected/0207f9a6-d481-4499-81a2-7e9bbaba9000-kube-api-access-nr2z5\") pod \"cinder-scheduler-0\" (UID: \"0207f9a6-d481-4499-81a2-7e9bbaba9000\") " pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.502453 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.666015 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9247840a-b887-4d3d-b147-6f2d8dbb6fbe" path="/var/lib/kubelet/pods/9247840a-b887-4d3d-b147-6f2d8dbb6fbe/volumes" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.821544 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7975dfb48-wvh2l" Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.867745 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7975dfb48-wvh2l"] Sep 30 20:32:51 crc kubenswrapper[4919]: I0930 20:32:51.872989 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7975dfb48-wvh2l"] Sep 30 20:32:52 crc kubenswrapper[4919]: I0930 20:32:52.035685 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 30 20:32:52 crc kubenswrapper[4919]: I0930 20:32:52.837780 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0207f9a6-d481-4499-81a2-7e9bbaba9000","Type":"ContainerStarted","Data":"f6caaf686653d1461eebf089edc653f7134647b1612015e49a8f0da0e9579940"} Sep 30 20:32:52 crc kubenswrapper[4919]: I0930 20:32:52.838166 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0207f9a6-d481-4499-81a2-7e9bbaba9000","Type":"ContainerStarted","Data":"864b943ec6c6bb4a0060d17e7d2ed434871566c8807ff6e79a08f5a6b9318cbd"} Sep 30 20:32:53 crc kubenswrapper[4919]: I0930 20:32:53.306468 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 30 20:32:53 crc kubenswrapper[4919]: I0930 20:32:53.650676 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b97d953a-480a-41b8-bbc5-b9a87b3a20cb" path="/var/lib/kubelet/pods/b97d953a-480a-41b8-bbc5-b9a87b3a20cb/volumes" Sep 30 20:32:53 crc kubenswrapper[4919]: I0930 20:32:53.847520 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0207f9a6-d481-4499-81a2-7e9bbaba9000","Type":"ContainerStarted","Data":"a5859e54abcb196e27c78932b6f4031b4b229e442e82535b214e500079131508"} Sep 30 20:32:53 crc kubenswrapper[4919]: I0930 20:32:53.870960 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.8709428089999998 podStartE2EDuration="2.870942809s" podCreationTimestamp="2025-09-30 20:32:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:32:53.863041552 +0000 UTC m=+1158.979074679" 
watchObservedRunningTime="2025-09-30 20:32:53.870942809 +0000 UTC m=+1158.986975936" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.540192 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-5f5gt"] Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.544846 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5f5gt" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.555825 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5f5gt"] Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.580265 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxjw2\" (UniqueName: \"kubernetes.io/projected/c442ecc0-a212-481d-add8-69ceb0c1cd1a-kube-api-access-xxjw2\") pod \"nova-api-db-create-5f5gt\" (UID: \"c442ecc0-a212-481d-add8-69ceb0c1cd1a\") " pod="openstack/nova-api-db-create-5f5gt" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.654128 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-qpzv7"] Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.655727 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qpzv7" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.664475 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-qpzv7"] Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.681778 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxjw2\" (UniqueName: \"kubernetes.io/projected/c442ecc0-a212-481d-add8-69ceb0c1cd1a-kube-api-access-xxjw2\") pod \"nova-api-db-create-5f5gt\" (UID: \"c442ecc0-a212-481d-add8-69ceb0c1cd1a\") " pod="openstack/nova-api-db-create-5f5gt" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.683123 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncz5m\" (UniqueName: \"kubernetes.io/projected/a78a5894-3d96-47fd-af15-15a6c66eb554-kube-api-access-ncz5m\") pod \"nova-cell0-db-create-qpzv7\" (UID: \"a78a5894-3d96-47fd-af15-15a6c66eb554\") " pod="openstack/nova-cell0-db-create-qpzv7" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.723234 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxjw2\" (UniqueName: \"kubernetes.io/projected/c442ecc0-a212-481d-add8-69ceb0c1cd1a-kube-api-access-xxjw2\") pod \"nova-api-db-create-5f5gt\" (UID: \"c442ecc0-a212-481d-add8-69ceb0c1cd1a\") " pod="openstack/nova-api-db-create-5f5gt" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.785556 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncz5m\" (UniqueName: \"kubernetes.io/projected/a78a5894-3d96-47fd-af15-15a6c66eb554-kube-api-access-ncz5m\") pod \"nova-cell0-db-create-qpzv7\" (UID: \"a78a5894-3d96-47fd-af15-15a6c66eb554\") " pod="openstack/nova-cell0-db-create-qpzv7" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.806329 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncz5m\" (UniqueName: \"kubernetes.io/projected/a78a5894-3d96-47fd-af15-15a6c66eb554-kube-api-access-ncz5m\") pod \"nova-cell0-db-create-qpzv7\" (UID: \"a78a5894-3d96-47fd-af15-15a6c66eb554\") " pod="openstack/nova-cell0-db-create-qpzv7" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.849880 
4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-hc97v"] Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.851419 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-hc97v" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.860096 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-hc97v"] Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.896977 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5f5gt" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.904573 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bc75f\" (UniqueName: \"kubernetes.io/projected/987212c4-8615-4a95-8779-768b5c0e0894-kube-api-access-bc75f\") pod \"nova-cell1-db-create-hc97v\" (UID: \"987212c4-8615-4a95-8779-768b5c0e0894\") " pod="openstack/nova-cell1-db-create-hc97v" Sep 30 20:32:55 crc kubenswrapper[4919]: I0930 20:32:55.970963 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qpzv7" Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.005831 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bc75f\" (UniqueName: \"kubernetes.io/projected/987212c4-8615-4a95-8779-768b5c0e0894-kube-api-access-bc75f\") pod \"nova-cell1-db-create-hc97v\" (UID: \"987212c4-8615-4a95-8779-768b5c0e0894\") " pod="openstack/nova-cell1-db-create-hc97v" Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.030152 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bc75f\" (UniqueName: \"kubernetes.io/projected/987212c4-8615-4a95-8779-768b5c0e0894-kube-api-access-bc75f\") pod \"nova-cell1-db-create-hc97v\" (UID: \"987212c4-8615-4a95-8779-768b5c0e0894\") " pod="openstack/nova-cell1-db-create-hc97v" Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.217345 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-hc97v" Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.372837 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-5f5gt"] Sep 30 20:32:56 crc kubenswrapper[4919]: W0930 20:32:56.385613 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc442ecc0_a212_481d_add8_69ceb0c1cd1a.slice/crio-a13f1092cc7faebbf45c258ba0e63c5dabab3937852a64d3794d5c17e4167ff8 WatchSource:0}: Error finding container a13f1092cc7faebbf45c258ba0e63c5dabab3937852a64d3794d5c17e4167ff8: Status 404 returned error can't find the container with id a13f1092cc7faebbf45c258ba0e63c5dabab3937852a64d3794d5c17e4167ff8 Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.502710 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.553322 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-qpzv7"] Sep 30 20:32:56 crc kubenswrapper[4919]: W0930 20:32:56.555889 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda78a5894_3d96_47fd_af15_15a6c66eb554.slice/crio-d94a960083b2d3fdce3fc159f8e476cfecf10d366bddd8b5a84e72704f2fd0f8 WatchSource:0}: Error finding container d94a960083b2d3fdce3fc159f8e476cfecf10d366bddd8b5a84e72704f2fd0f8: Status 404 returned error can't find the container with id d94a960083b2d3fdce3fc159f8e476cfecf10d366bddd8b5a84e72704f2fd0f8 Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.685757 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-hc97v"] Sep 30 20:32:56 crc kubenswrapper[4919]: W0930 20:32:56.699980 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod987212c4_8615_4a95_8779_768b5c0e0894.slice/crio-42fd1a8e355f431b7b4dc6512d3278b64903df036539882b873d9fcf3655a0e9 WatchSource:0}: Error finding container 42fd1a8e355f431b7b4dc6512d3278b64903df036539882b873d9fcf3655a0e9: Status 404 returned error can't find the container with id 42fd1a8e355f431b7b4dc6512d3278b64903df036539882b873d9fcf3655a0e9 Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.923241 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hc97v" event={"ID":"987212c4-8615-4a95-8779-768b5c0e0894","Type":"ContainerStarted","Data":"42fd1a8e355f431b7b4dc6512d3278b64903df036539882b873d9fcf3655a0e9"} Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.926431 4919 generic.go:334] "Generic (PLEG): container finished" podID="a78a5894-3d96-47fd-af15-15a6c66eb554" containerID="16b47e5c0c0cc961ecbae1c136be515aa5a4d22385f0e293a0a08d31cd4b82e1" exitCode=0 Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.926527 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qpzv7" event={"ID":"a78a5894-3d96-47fd-af15-15a6c66eb554","Type":"ContainerDied","Data":"16b47e5c0c0cc961ecbae1c136be515aa5a4d22385f0e293a0a08d31cd4b82e1"} Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.926568 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qpzv7" event={"ID":"a78a5894-3d96-47fd-af15-15a6c66eb554","Type":"ContainerStarted","Data":"d94a960083b2d3fdce3fc159f8e476cfecf10d366bddd8b5a84e72704f2fd0f8"} Sep 30 
20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.928326 4919 generic.go:334] "Generic (PLEG): container finished" podID="c442ecc0-a212-481d-add8-69ceb0c1cd1a" containerID="ec58b669e72c316835d3e02244b0f8afdd0a38ffe265593c4a749d3ddfd5e1de" exitCode=0 Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.928358 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5f5gt" event={"ID":"c442ecc0-a212-481d-add8-69ceb0c1cd1a","Type":"ContainerDied","Data":"ec58b669e72c316835d3e02244b0f8afdd0a38ffe265593c4a749d3ddfd5e1de"} Sep 30 20:32:56 crc kubenswrapper[4919]: I0930 20:32:56.928376 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5f5gt" event={"ID":"c442ecc0-a212-481d-add8-69ceb0c1cd1a","Type":"ContainerStarted","Data":"a13f1092cc7faebbf45c258ba0e63c5dabab3937852a64d3794d5c17e4167ff8"} Sep 30 20:32:57 crc kubenswrapper[4919]: I0930 20:32:57.938569 4919 generic.go:334] "Generic (PLEG): container finished" podID="987212c4-8615-4a95-8779-768b5c0e0894" containerID="d58d12d29e6f898761505a675ef3a1486fa32516182cac251985ca0cd9e5f286" exitCode=0 Sep 30 20:32:57 crc kubenswrapper[4919]: I0930 20:32:57.938784 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hc97v" event={"ID":"987212c4-8615-4a95-8779-768b5c0e0894","Type":"ContainerDied","Data":"d58d12d29e6f898761505a675ef3a1486fa32516182cac251985ca0cd9e5f286"} Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.379383 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5f5gt" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.397248 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qpzv7" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.457407 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncz5m\" (UniqueName: \"kubernetes.io/projected/a78a5894-3d96-47fd-af15-15a6c66eb554-kube-api-access-ncz5m\") pod \"a78a5894-3d96-47fd-af15-15a6c66eb554\" (UID: \"a78a5894-3d96-47fd-af15-15a6c66eb554\") " Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.457573 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxjw2\" (UniqueName: \"kubernetes.io/projected/c442ecc0-a212-481d-add8-69ceb0c1cd1a-kube-api-access-xxjw2\") pod \"c442ecc0-a212-481d-add8-69ceb0c1cd1a\" (UID: \"c442ecc0-a212-481d-add8-69ceb0c1cd1a\") " Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.463688 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a78a5894-3d96-47fd-af15-15a6c66eb554-kube-api-access-ncz5m" (OuterVolumeSpecName: "kube-api-access-ncz5m") pod "a78a5894-3d96-47fd-af15-15a6c66eb554" (UID: "a78a5894-3d96-47fd-af15-15a6c66eb554"). InnerVolumeSpecName "kube-api-access-ncz5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.464175 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c442ecc0-a212-481d-add8-69ceb0c1cd1a-kube-api-access-xxjw2" (OuterVolumeSpecName: "kube-api-access-xxjw2") pod "c442ecc0-a212-481d-add8-69ceb0c1cd1a" (UID: "c442ecc0-a212-481d-add8-69ceb0c1cd1a"). InnerVolumeSpecName "kube-api-access-xxjw2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.559822 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxjw2\" (UniqueName: \"kubernetes.io/projected/c442ecc0-a212-481d-add8-69ceb0c1cd1a-kube-api-access-xxjw2\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.559858 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncz5m\" (UniqueName: \"kubernetes.io/projected/a78a5894-3d96-47fd-af15-15a6c66eb554-kube-api-access-ncz5m\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.952883 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-5f5gt" event={"ID":"c442ecc0-a212-481d-add8-69ceb0c1cd1a","Type":"ContainerDied","Data":"a13f1092cc7faebbf45c258ba0e63c5dabab3937852a64d3794d5c17e4167ff8"} Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.952942 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a13f1092cc7faebbf45c258ba0e63c5dabab3937852a64d3794d5c17e4167ff8" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.952969 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-5f5gt" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.954989 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-qpzv7" event={"ID":"a78a5894-3d96-47fd-af15-15a6c66eb554","Type":"ContainerDied","Data":"d94a960083b2d3fdce3fc159f8e476cfecf10d366bddd8b5a84e72704f2fd0f8"} Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.955020 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d94a960083b2d3fdce3fc159f8e476cfecf10d366bddd8b5a84e72704f2fd0f8" Sep 30 20:32:58 crc kubenswrapper[4919]: I0930 20:32:58.955026 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-qpzv7" Sep 30 20:32:59 crc kubenswrapper[4919]: I0930 20:32:59.368407 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-hc97v" Sep 30 20:32:59 crc kubenswrapper[4919]: I0930 20:32:59.477888 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bc75f\" (UniqueName: \"kubernetes.io/projected/987212c4-8615-4a95-8779-768b5c0e0894-kube-api-access-bc75f\") pod \"987212c4-8615-4a95-8779-768b5c0e0894\" (UID: \"987212c4-8615-4a95-8779-768b5c0e0894\") " Sep 30 20:32:59 crc kubenswrapper[4919]: I0930 20:32:59.485063 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/987212c4-8615-4a95-8779-768b5c0e0894-kube-api-access-bc75f" (OuterVolumeSpecName: "kube-api-access-bc75f") pod "987212c4-8615-4a95-8779-768b5c0e0894" (UID: "987212c4-8615-4a95-8779-768b5c0e0894"). InnerVolumeSpecName "kube-api-access-bc75f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:32:59 crc kubenswrapper[4919]: I0930 20:32:59.579717 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bc75f\" (UniqueName: \"kubernetes.io/projected/987212c4-8615-4a95-8779-768b5c0e0894-kube-api-access-bc75f\") on node \"crc\" DevicePath \"\"" Sep 30 20:32:59 crc kubenswrapper[4919]: I0930 20:32:59.966371 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-hc97v" event={"ID":"987212c4-8615-4a95-8779-768b5c0e0894","Type":"ContainerDied","Data":"42fd1a8e355f431b7b4dc6512d3278b64903df036539882b873d9fcf3655a0e9"} Sep 30 20:32:59 crc kubenswrapper[4919]: I0930 20:32:59.966690 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42fd1a8e355f431b7b4dc6512d3278b64903df036539882b873d9fcf3655a0e9" Sep 30 20:32:59 crc kubenswrapper[4919]: I0930 20:32:59.966476 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-hc97v" Sep 30 20:33:01 crc kubenswrapper[4919]: I0930 20:33:01.734521 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.185237 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.678374 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-16e9-account-create-h7pk9"] Sep 30 20:33:05 crc kubenswrapper[4919]: E0930 20:33:05.679110 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="987212c4-8615-4a95-8779-768b5c0e0894" containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.679135 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="987212c4-8615-4a95-8779-768b5c0e0894" containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: E0930 20:33:05.679170 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a78a5894-3d96-47fd-af15-15a6c66eb554" containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.679179 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="a78a5894-3d96-47fd-af15-15a6c66eb554" containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: E0930 20:33:05.679197 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c442ecc0-a212-481d-add8-69ceb0c1cd1a" containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.679227 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c442ecc0-a212-481d-add8-69ceb0c1cd1a" containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.679505 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c442ecc0-a212-481d-add8-69ceb0c1cd1a" containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.679529 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="987212c4-8615-4a95-8779-768b5c0e0894" containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.679550 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="a78a5894-3d96-47fd-af15-15a6c66eb554" 
containerName="mariadb-database-create" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.680491 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-16e9-account-create-h7pk9" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.687272 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-16e9-account-create-h7pk9"] Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.727513 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.830037 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-724q5\" (UniqueName: \"kubernetes.io/projected/a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9-kube-api-access-724q5\") pod \"nova-api-16e9-account-create-h7pk9\" (UID: \"a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9\") " pod="openstack/nova-api-16e9-account-create-h7pk9" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.882180 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-b360-account-create-p9sdw"] Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.883868 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b360-account-create-p9sdw" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.885822 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.889422 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b360-account-create-p9sdw"] Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.931967 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-724q5\" (UniqueName: \"kubernetes.io/projected/a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9-kube-api-access-724q5\") pod \"nova-api-16e9-account-create-h7pk9\" (UID: \"a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9\") " pod="openstack/nova-api-16e9-account-create-h7pk9" Sep 30 20:33:05 crc kubenswrapper[4919]: I0930 20:33:05.956876 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-724q5\" (UniqueName: \"kubernetes.io/projected/a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9-kube-api-access-724q5\") pod \"nova-api-16e9-account-create-h7pk9\" (UID: \"a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9\") " pod="openstack/nova-api-16e9-account-create-h7pk9" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.034308 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zgbl\" (UniqueName: \"kubernetes.io/projected/17ef649b-5bb6-4b14-934a-475366b7b842-kube-api-access-7zgbl\") pod \"nova-cell0-b360-account-create-p9sdw\" (UID: \"17ef649b-5bb6-4b14-934a-475366b7b842\") " pod="openstack/nova-cell0-b360-account-create-p9sdw" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.073167 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-16e9-account-create-h7pk9" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.078776 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-bf0f-account-create-l9kml"] Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.080114 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-bf0f-account-create-l9kml" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.082554 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.105091 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bf0f-account-create-l9kml"] Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.136872 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zgbl\" (UniqueName: \"kubernetes.io/projected/17ef649b-5bb6-4b14-934a-475366b7b842-kube-api-access-7zgbl\") pod \"nova-cell0-b360-account-create-p9sdw\" (UID: \"17ef649b-5bb6-4b14-934a-475366b7b842\") " pod="openstack/nova-cell0-b360-account-create-p9sdw" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.170033 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zgbl\" (UniqueName: \"kubernetes.io/projected/17ef649b-5bb6-4b14-934a-475366b7b842-kube-api-access-7zgbl\") pod \"nova-cell0-b360-account-create-p9sdw\" (UID: \"17ef649b-5bb6-4b14-934a-475366b7b842\") " pod="openstack/nova-cell0-b360-account-create-p9sdw" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.205454 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b360-account-create-p9sdw" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.250270 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df8dx\" (UniqueName: \"kubernetes.io/projected/315c9e3b-9f0f-41be-bff1-282740802b24-kube-api-access-df8dx\") pod \"nova-cell1-bf0f-account-create-l9kml\" (UID: \"315c9e3b-9f0f-41be-bff1-282740802b24\") " pod="openstack/nova-cell1-bf0f-account-create-l9kml" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.352124 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df8dx\" (UniqueName: \"kubernetes.io/projected/315c9e3b-9f0f-41be-bff1-282740802b24-kube-api-access-df8dx\") pod \"nova-cell1-bf0f-account-create-l9kml\" (UID: \"315c9e3b-9f0f-41be-bff1-282740802b24\") " pod="openstack/nova-cell1-bf0f-account-create-l9kml" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.373514 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df8dx\" (UniqueName: \"kubernetes.io/projected/315c9e3b-9f0f-41be-bff1-282740802b24-kube-api-access-df8dx\") pod \"nova-cell1-bf0f-account-create-l9kml\" (UID: \"315c9e3b-9f0f-41be-bff1-282740802b24\") " pod="openstack/nova-cell1-bf0f-account-create-l9kml" Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.531894 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-bf0f-account-create-l9kml" Sep 30 20:33:06 crc kubenswrapper[4919]: W0930 20:33:06.588318 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda86fb69e_33fe_4ec9_b7ca_c5ef1bc859b9.slice/crio-6d9786f8b94528562cb43f92de132f65f2588a640fedef262d5ec4cc283b72b6 WatchSource:0}: Error finding container 6d9786f8b94528562cb43f92de132f65f2588a640fedef262d5ec4cc283b72b6: Status 404 returned error can't find the container with id 6d9786f8b94528562cb43f92de132f65f2588a640fedef262d5ec4cc283b72b6 Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.591519 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-16e9-account-create-h7pk9"] Sep 30 20:33:06 crc kubenswrapper[4919]: I0930 20:33:06.758555 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b360-account-create-p9sdw"] Sep 30 20:33:06 crc kubenswrapper[4919]: W0930 20:33:06.769224 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17ef649b_5bb6_4b14_934a_475366b7b842.slice/crio-5c3d9b8bbf62030bcacedefba8843a3e3516c578ac90133f038ff60692b3de8f WatchSource:0}: Error finding container 5c3d9b8bbf62030bcacedefba8843a3e3516c578ac90133f038ff60692b3de8f: Status 404 returned error can't find the container with id 5c3d9b8bbf62030bcacedefba8843a3e3516c578ac90133f038ff60692b3de8f Sep 30 20:33:07 crc kubenswrapper[4919]: I0930 20:33:07.034173 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b360-account-create-p9sdw" event={"ID":"17ef649b-5bb6-4b14-934a-475366b7b842","Type":"ContainerStarted","Data":"97963b2ee3b33c26b856319b87af07b393242e102cf5e6614fb525d828f3d874"} Sep 30 20:33:07 crc kubenswrapper[4919]: I0930 20:33:07.034295 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b360-account-create-p9sdw" event={"ID":"17ef649b-5bb6-4b14-934a-475366b7b842","Type":"ContainerStarted","Data":"5c3d9b8bbf62030bcacedefba8843a3e3516c578ac90133f038ff60692b3de8f"} Sep 30 20:33:07 crc kubenswrapper[4919]: I0930 20:33:07.036164 4919 generic.go:334] "Generic (PLEG): container finished" podID="a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9" containerID="6ceebd0a18b1080dd1c41a198a44e9b9db57c5b4297cafd8664cb74182966cd7" exitCode=0 Sep 30 20:33:07 crc kubenswrapper[4919]: I0930 20:33:07.036209 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-16e9-account-create-h7pk9" event={"ID":"a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9","Type":"ContainerDied","Data":"6ceebd0a18b1080dd1c41a198a44e9b9db57c5b4297cafd8664cb74182966cd7"} Sep 30 20:33:07 crc kubenswrapper[4919]: I0930 20:33:07.036262 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-16e9-account-create-h7pk9" event={"ID":"a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9","Type":"ContainerStarted","Data":"6d9786f8b94528562cb43f92de132f65f2588a640fedef262d5ec4cc283b72b6"} Sep 30 20:33:07 crc kubenswrapper[4919]: I0930 20:33:07.042674 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bf0f-account-create-l9kml"] Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.052415 4919 generic.go:334] "Generic (PLEG): container finished" podID="315c9e3b-9f0f-41be-bff1-282740802b24" containerID="df33187181f3336afdb3910ec292c2fea8847c1544a0ae1878a0108fc70ec581" exitCode=0 Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.052478 4919 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bf0f-account-create-l9kml" event={"ID":"315c9e3b-9f0f-41be-bff1-282740802b24","Type":"ContainerDied","Data":"df33187181f3336afdb3910ec292c2fea8847c1544a0ae1878a0108fc70ec581"} Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.052906 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bf0f-account-create-l9kml" event={"ID":"315c9e3b-9f0f-41be-bff1-282740802b24","Type":"ContainerStarted","Data":"02100d415c3557536da3d79ada72399cd399ae74440d7240fc7ae4b91ba74613"} Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.057697 4919 generic.go:334] "Generic (PLEG): container finished" podID="17ef649b-5bb6-4b14-934a-475366b7b842" containerID="97963b2ee3b33c26b856319b87af07b393242e102cf5e6614fb525d828f3d874" exitCode=0 Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.057938 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b360-account-create-p9sdw" event={"ID":"17ef649b-5bb6-4b14-934a-475366b7b842","Type":"ContainerDied","Data":"97963b2ee3b33c26b856319b87af07b393242e102cf5e6614fb525d828f3d874"} Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.465948 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-16e9-account-create-h7pk9" Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.593962 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-724q5\" (UniqueName: \"kubernetes.io/projected/a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9-kube-api-access-724q5\") pod \"a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9\" (UID: \"a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9\") " Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.610980 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9-kube-api-access-724q5" (OuterVolumeSpecName: "kube-api-access-724q5") pod "a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9" (UID: "a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9"). InnerVolumeSpecName "kube-api-access-724q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:08 crc kubenswrapper[4919]: I0930 20:33:08.697804 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-724q5\" (UniqueName: \"kubernetes.io/projected/a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9-kube-api-access-724q5\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.071518 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-16e9-account-create-h7pk9" event={"ID":"a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9","Type":"ContainerDied","Data":"6d9786f8b94528562cb43f92de132f65f2588a640fedef262d5ec4cc283b72b6"} Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.071579 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d9786f8b94528562cb43f92de132f65f2588a640fedef262d5ec4cc283b72b6" Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.071688 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-16e9-account-create-h7pk9" Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.679937 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bf0f-account-create-l9kml" Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.686923 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b360-account-create-p9sdw" Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.832148 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-df8dx\" (UniqueName: \"kubernetes.io/projected/315c9e3b-9f0f-41be-bff1-282740802b24-kube-api-access-df8dx\") pod \"315c9e3b-9f0f-41be-bff1-282740802b24\" (UID: \"315c9e3b-9f0f-41be-bff1-282740802b24\") " Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.832414 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7zgbl\" (UniqueName: \"kubernetes.io/projected/17ef649b-5bb6-4b14-934a-475366b7b842-kube-api-access-7zgbl\") pod \"17ef649b-5bb6-4b14-934a-475366b7b842\" (UID: \"17ef649b-5bb6-4b14-934a-475366b7b842\") " Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.838789 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/315c9e3b-9f0f-41be-bff1-282740802b24-kube-api-access-df8dx" (OuterVolumeSpecName: "kube-api-access-df8dx") pod "315c9e3b-9f0f-41be-bff1-282740802b24" (UID: "315c9e3b-9f0f-41be-bff1-282740802b24"). InnerVolumeSpecName "kube-api-access-df8dx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.839257 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17ef649b-5bb6-4b14-934a-475366b7b842-kube-api-access-7zgbl" (OuterVolumeSpecName: "kube-api-access-7zgbl") pod "17ef649b-5bb6-4b14-934a-475366b7b842" (UID: "17ef649b-5bb6-4b14-934a-475366b7b842"). InnerVolumeSpecName "kube-api-access-7zgbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.934796 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7zgbl\" (UniqueName: \"kubernetes.io/projected/17ef649b-5bb6-4b14-934a-475366b7b842-kube-api-access-7zgbl\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:09 crc kubenswrapper[4919]: I0930 20:33:09.935075 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-df8dx\" (UniqueName: \"kubernetes.io/projected/315c9e3b-9f0f-41be-bff1-282740802b24-kube-api-access-df8dx\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:10 crc kubenswrapper[4919]: I0930 20:33:10.086712 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b360-account-create-p9sdw" event={"ID":"17ef649b-5bb6-4b14-934a-475366b7b842","Type":"ContainerDied","Data":"5c3d9b8bbf62030bcacedefba8843a3e3516c578ac90133f038ff60692b3de8f"} Sep 30 20:33:10 crc kubenswrapper[4919]: I0930 20:33:10.086764 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c3d9b8bbf62030bcacedefba8843a3e3516c578ac90133f038ff60692b3de8f" Sep 30 20:33:10 crc kubenswrapper[4919]: I0930 20:33:10.086771 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b360-account-create-p9sdw" Sep 30 20:33:10 crc kubenswrapper[4919]: I0930 20:33:10.089023 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bf0f-account-create-l9kml" event={"ID":"315c9e3b-9f0f-41be-bff1-282740802b24","Type":"ContainerDied","Data":"02100d415c3557536da3d79ada72399cd399ae74440d7240fc7ae4b91ba74613"} Sep 30 20:33:10 crc kubenswrapper[4919]: I0930 20:33:10.089041 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02100d415c3557536da3d79ada72399cd399ae74440d7240fc7ae4b91ba74613" Sep 30 20:33:10 crc kubenswrapper[4919]: I0930 20:33:10.089091 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bf0f-account-create-l9kml" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.100815 4919 generic.go:334] "Generic (PLEG): container finished" podID="830bc004-4464-408b-9696-1c69dcbcc793" containerID="c68cd5f8cfa139223a59a999d0eca3a1b46126cfb5a9f5b3d2960a79d3bc97e5" exitCode=137 Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.100840 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerDied","Data":"c68cd5f8cfa139223a59a999d0eca3a1b46126cfb5a9f5b3d2960a79d3bc97e5"} Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.101259 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"830bc004-4464-408b-9696-1c69dcbcc793","Type":"ContainerDied","Data":"218db87c25b8d4292b025cedfc2c474f49c17ec178e0d59e2fa2339458cc9da2"} Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.101277 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="218db87c25b8d4292b025cedfc2c474f49c17ec178e0d59e2fa2339458cc9da2" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.106555 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.126949 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vbbrr"] Sep 30 20:33:11 crc kubenswrapper[4919]: E0930 20:33:11.127342 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="sg-core" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127360 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="sg-core" Sep 30 20:33:11 crc kubenswrapper[4919]: E0930 20:33:11.127373 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="315c9e3b-9f0f-41be-bff1-282740802b24" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127380 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="315c9e3b-9f0f-41be-bff1-282740802b24" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: E0930 20:33:11.127402 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="ceilometer-central-agent" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127409 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="ceilometer-central-agent" Sep 30 20:33:11 crc kubenswrapper[4919]: E0930 20:33:11.127422 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="ceilometer-notification-agent" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127428 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="ceilometer-notification-agent" Sep 30 20:33:11 crc kubenswrapper[4919]: E0930 20:33:11.127435 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17ef649b-5bb6-4b14-934a-475366b7b842" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127441 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="17ef649b-5bb6-4b14-934a-475366b7b842" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: E0930 20:33:11.127459 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="proxy-httpd" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127465 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="proxy-httpd" Sep 30 20:33:11 crc kubenswrapper[4919]: E0930 20:33:11.127481 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127487 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127669 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="proxy-httpd" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127684 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="sg-core" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127699 4919 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="ceilometer-central-agent" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127717 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127729 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="17ef649b-5bb6-4b14-934a-475366b7b842" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127739 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="830bc004-4464-408b-9696-1c69dcbcc793" containerName="ceilometer-notification-agent" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.127751 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="315c9e3b-9f0f-41be-bff1-282740802b24" containerName="mariadb-account-create" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.128375 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.133588 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.133832 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-4cl7w" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.134017 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.136974 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vbbrr"] Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.264498 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-scripts\") pod \"830bc004-4464-408b-9696-1c69dcbcc793\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.264618 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-sg-core-conf-yaml\") pod \"830bc004-4464-408b-9696-1c69dcbcc793\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.264658 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-config-data\") pod \"830bc004-4464-408b-9696-1c69dcbcc793\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.264711 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tr22f\" (UniqueName: \"kubernetes.io/projected/830bc004-4464-408b-9696-1c69dcbcc793-kube-api-access-tr22f\") pod \"830bc004-4464-408b-9696-1c69dcbcc793\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.264744 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-run-httpd\") pod \"830bc004-4464-408b-9696-1c69dcbcc793\" (UID: 
\"830bc004-4464-408b-9696-1c69dcbcc793\") " Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.264768 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-combined-ca-bundle\") pod \"830bc004-4464-408b-9696-1c69dcbcc793\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.264899 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-log-httpd\") pod \"830bc004-4464-408b-9696-1c69dcbcc793\" (UID: \"830bc004-4464-408b-9696-1c69dcbcc793\") " Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.265198 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.265276 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-config-data\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.265347 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-scripts\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.265442 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfn8c\" (UniqueName: \"kubernetes.io/projected/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-kube-api-access-vfn8c\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.266713 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "830bc004-4464-408b-9696-1c69dcbcc793" (UID: "830bc004-4464-408b-9696-1c69dcbcc793"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.267884 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "830bc004-4464-408b-9696-1c69dcbcc793" (UID: "830bc004-4464-408b-9696-1c69dcbcc793"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.271434 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/830bc004-4464-408b-9696-1c69dcbcc793-kube-api-access-tr22f" (OuterVolumeSpecName: "kube-api-access-tr22f") pod "830bc004-4464-408b-9696-1c69dcbcc793" (UID: "830bc004-4464-408b-9696-1c69dcbcc793"). InnerVolumeSpecName "kube-api-access-tr22f". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.271521 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-scripts" (OuterVolumeSpecName: "scripts") pod "830bc004-4464-408b-9696-1c69dcbcc793" (UID: "830bc004-4464-408b-9696-1c69dcbcc793"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.298979 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "830bc004-4464-408b-9696-1c69dcbcc793" (UID: "830bc004-4464-408b-9696-1c69dcbcc793"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.367519 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfn8c\" (UniqueName: \"kubernetes.io/projected/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-kube-api-access-vfn8c\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.367662 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.367706 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-config-data\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.367763 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-scripts\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.367809 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.367819 4919 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 
20:33:11.367828 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tr22f\" (UniqueName: \"kubernetes.io/projected/830bc004-4464-408b-9696-1c69dcbcc793-kube-api-access-tr22f\") on node \"crc\" DevicePath \"\""
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.367836 4919 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.367845 4919 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/830bc004-4464-408b-9696-1c69dcbcc793-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.372319 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr"
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.373047 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-config-data\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr"
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.374120 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-scripts\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr"
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.375408 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "830bc004-4464-408b-9696-1c69dcbcc793" (UID: "830bc004-4464-408b-9696-1c69dcbcc793"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.384551 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfn8c\" (UniqueName: \"kubernetes.io/projected/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-kube-api-access-vfn8c\") pod \"nova-cell0-conductor-db-sync-vbbrr\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " pod="openstack/nova-cell0-conductor-db-sync-vbbrr"
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.404577 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-config-data" (OuterVolumeSpecName: "config-data") pod "830bc004-4464-408b-9696-1c69dcbcc793" (UID: "830bc004-4464-408b-9696-1c69dcbcc793"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.463845 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vbbrr"
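Note: the interleaved UnmountVolume / MountVolume records above are one pass of the volume manager's reconciler: it diffs the desired world (volumes of pods that should be running, here the new nova-cell0-conductor-db-sync pod) against the actual world (volumes still mounted for the deleted ceilometer-0 pod) and issues unmount and mount operations until the two converge. A toy version of that diff (hypothetical names, not the kubelet's actual reconciler):

// volume_reconcile_sketch.go - desired-vs-actual diff behind the log records above.
package main

import "fmt"

// reconcile unmounts volumes that are mounted but no longer desired,
// then mounts volumes that are desired but not yet mounted.
func reconcile(desired, actual map[string]bool) {
	for v := range actual {
		if !desired[v] {
			fmt.Println("UnmountVolume started for volume", v)
		}
	}
	for v := range desired {
		if !actual[v] {
			fmt.Println("MountVolume started for volume", v)
		}
	}
}

func main() {
	actual := map[string]bool{"830bc004/scripts": true, "830bc004/config-data": true}
	desired := map[string]bool{"c48fdc4f/scripts": true, "c48fdc4f/config-data": true}
	reconcile(desired, actual) // unmounts the old pod's volumes, mounts the new pod's
}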
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.469660 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.469699 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/830bc004-4464-408b-9696-1c69dcbcc793-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:11 crc kubenswrapper[4919]: I0930 20:33:11.946480 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vbbrr"] Sep 30 20:33:11 crc kubenswrapper[4919]: W0930 20:33:11.947364 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc48fdc4f_d6dc_4f45_8c71_6ae82bece275.slice/crio-6ec879c93d241de77f0a4f3ea853e6103c5a976983e590628860cc1f80685d6f WatchSource:0}: Error finding container 6ec879c93d241de77f0a4f3ea853e6103c5a976983e590628860cc1f80685d6f: Status 404 returned error can't find the container with id 6ec879c93d241de77f0a4f3ea853e6103c5a976983e590628860cc1f80685d6f Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.111304 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" event={"ID":"c48fdc4f-d6dc-4f45-8c71-6ae82bece275","Type":"ContainerStarted","Data":"6ec879c93d241de77f0a4f3ea853e6103c5a976983e590628860cc1f80685d6f"} Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.111371 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.144397 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.154485 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.181006 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.183578 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.185595 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.185732 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.192660 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.283688 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwhcr\" (UniqueName: \"kubernetes.io/projected/9b1898ef-3ec1-4e26-9332-a1402f89e08e-kube-api-access-xwhcr\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.283757 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.283797 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-scripts\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.283816 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.283998 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-log-httpd\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.284113 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-config-data\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.284599 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-run-httpd\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.385754 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-scripts\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.385804 4919 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.385837 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-log-httpd\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.385862 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-config-data\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.385941 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-run-httpd\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.385987 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwhcr\" (UniqueName: \"kubernetes.io/projected/9b1898ef-3ec1-4e26-9332-a1402f89e08e-kube-api-access-xwhcr\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.386025 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.386730 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-log-httpd\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.386740 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-run-httpd\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.392031 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-scripts\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.400771 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.401430 4919 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.402363 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-config-data\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.403892 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwhcr\" (UniqueName: \"kubernetes.io/projected/9b1898ef-3ec1-4e26-9332-a1402f89e08e-kube-api-access-xwhcr\") pod \"ceilometer-0\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.502368 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:33:12 crc kubenswrapper[4919]: I0930 20:33:12.947057 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:12 crc kubenswrapper[4919]: W0930 20:33:12.948385 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b1898ef_3ec1_4e26_9332_a1402f89e08e.slice/crio-ab06966c29690897bf0456896b7eb28dacee10e4acdd7e787af8b00ff8dd398c WatchSource:0}: Error finding container ab06966c29690897bf0456896b7eb28dacee10e4acdd7e787af8b00ff8dd398c: Status 404 returned error can't find the container with id ab06966c29690897bf0456896b7eb28dacee10e4acdd7e787af8b00ff8dd398c Sep 30 20:33:13 crc kubenswrapper[4919]: I0930 20:33:13.122311 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerStarted","Data":"ab06966c29690897bf0456896b7eb28dacee10e4acdd7e787af8b00ff8dd398c"} Sep 30 20:33:13 crc kubenswrapper[4919]: I0930 20:33:13.644693 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="830bc004-4464-408b-9696-1c69dcbcc793" path="/var/lib/kubelet/pods/830bc004-4464-408b-9696-1c69dcbcc793/volumes" Sep 30 20:33:14 crc kubenswrapper[4919]: I0930 20:33:14.134345 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerStarted","Data":"8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e"} Sep 30 20:33:15 crc kubenswrapper[4919]: I0930 20:33:15.146432 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerStarted","Data":"3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1"} Sep 30 20:33:20 crc kubenswrapper[4919]: I0930 20:33:20.200580 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" event={"ID":"c48fdc4f-d6dc-4f45-8c71-6ae82bece275","Type":"ContainerStarted","Data":"bcad8474bb62bf251e2a3de6999d78cc933c33f246eeb6eab5c4a28d7d386937"} Sep 30 20:33:20 crc kubenswrapper[4919]: I0930 20:33:20.202682 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerStarted","Data":"dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486"} Sep 30 20:33:22 crc kubenswrapper[4919]: I0930 20:33:22.232210 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerStarted","Data":"985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc"} Sep 30 20:33:22 crc kubenswrapper[4919]: I0930 20:33:22.232681 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:33:22 crc kubenswrapper[4919]: I0930 20:33:22.268858 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.7418674379999999 podStartE2EDuration="10.268820315s" podCreationTimestamp="2025-09-30 20:33:12 +0000 UTC" firstStartedPulling="2025-09-30 20:33:12.950599739 +0000 UTC m=+1178.066632866" lastFinishedPulling="2025-09-30 20:33:21.477552586 +0000 UTC m=+1186.593585743" observedRunningTime="2025-09-30 20:33:22.262689708 +0000 UTC m=+1187.378722875" watchObservedRunningTime="2025-09-30 20:33:22.268820315 +0000 UTC m=+1187.384853482" Sep 30 20:33:22 crc kubenswrapper[4919]: I0930 20:33:22.271797 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" podStartSLOduration=3.9395577250000002 podStartE2EDuration="11.2717766s" podCreationTimestamp="2025-09-30 20:33:11 +0000 UTC" firstStartedPulling="2025-09-30 20:33:11.949196542 +0000 UTC m=+1177.065229669" lastFinishedPulling="2025-09-30 20:33:19.281415387 +0000 UTC m=+1184.397448544" observedRunningTime="2025-09-30 20:33:20.224771631 +0000 UTC m=+1185.340804798" watchObservedRunningTime="2025-09-30 20:33:22.2717766 +0000 UTC m=+1187.387809807" Sep 30 20:33:29 crc kubenswrapper[4919]: I0930 20:33:29.325855 4919 generic.go:334] "Generic (PLEG): container finished" podID="c48fdc4f-d6dc-4f45-8c71-6ae82bece275" containerID="bcad8474bb62bf251e2a3de6999d78cc933c33f246eeb6eab5c4a28d7d386937" exitCode=0 Sep 30 20:33:29 crc kubenswrapper[4919]: I0930 20:33:29.325939 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" event={"ID":"c48fdc4f-d6dc-4f45-8c71-6ae82bece275","Type":"ContainerDied","Data":"bcad8474bb62bf251e2a3de6999d78cc933c33f246eeb6eab5c4a28d7d386937"} Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.793116 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.905702 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-config-data\") pod \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.905782 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-combined-ca-bundle\") pod \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.906016 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-scripts\") pod \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.906082 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfn8c\" (UniqueName: \"kubernetes.io/projected/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-kube-api-access-vfn8c\") pod \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\" (UID: \"c48fdc4f-d6dc-4f45-8c71-6ae82bece275\") " Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.911734 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-scripts" (OuterVolumeSpecName: "scripts") pod "c48fdc4f-d6dc-4f45-8c71-6ae82bece275" (UID: "c48fdc4f-d6dc-4f45-8c71-6ae82bece275"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.913385 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-kube-api-access-vfn8c" (OuterVolumeSpecName: "kube-api-access-vfn8c") pod "c48fdc4f-d6dc-4f45-8c71-6ae82bece275" (UID: "c48fdc4f-d6dc-4f45-8c71-6ae82bece275"). InnerVolumeSpecName "kube-api-access-vfn8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.934058 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-config-data" (OuterVolumeSpecName: "config-data") pod "c48fdc4f-d6dc-4f45-8c71-6ae82bece275" (UID: "c48fdc4f-d6dc-4f45-8c71-6ae82bece275"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:30 crc kubenswrapper[4919]: I0930 20:33:30.946521 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c48fdc4f-d6dc-4f45-8c71-6ae82bece275" (UID: "c48fdc4f-d6dc-4f45-8c71-6ae82bece275"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.008146 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfn8c\" (UniqueName: \"kubernetes.io/projected/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-kube-api-access-vfn8c\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.008187 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.008201 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.008236 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48fdc4f-d6dc-4f45-8c71-6ae82bece275-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.349806 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" event={"ID":"c48fdc4f-d6dc-4f45-8c71-6ae82bece275","Type":"ContainerDied","Data":"6ec879c93d241de77f0a4f3ea853e6103c5a976983e590628860cc1f80685d6f"} Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.350181 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ec879c93d241de77f0a4f3ea853e6103c5a976983e590628860cc1f80685d6f" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.349845 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-vbbrr" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.463647 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 20:33:31 crc kubenswrapper[4919]: E0930 20:33:31.464145 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c48fdc4f-d6dc-4f45-8c71-6ae82bece275" containerName="nova-cell0-conductor-db-sync" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.464171 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c48fdc4f-d6dc-4f45-8c71-6ae82bece275" containerName="nova-cell0-conductor-db-sync" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.464446 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c48fdc4f-d6dc-4f45-8c71-6ae82bece275" containerName="nova-cell0-conductor-db-sync" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.465253 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.467745 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-4cl7w" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.467854 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.476639 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.619125 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45xld\" (UniqueName: \"kubernetes.io/projected/b8ef3110-c308-43a3-a7b8-18d0f7d50488-kube-api-access-45xld\") pod \"nova-cell0-conductor-0\" (UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.619206 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8ef3110-c308-43a3-a7b8-18d0f7d50488-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.619547 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8ef3110-c308-43a3-a7b8-18d0f7d50488-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.721794 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8ef3110-c308-43a3-a7b8-18d0f7d50488-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.722715 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45xld\" (UniqueName: \"kubernetes.io/projected/b8ef3110-c308-43a3-a7b8-18d0f7d50488-kube-api-access-45xld\") pod \"nova-cell0-conductor-0\" (UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.722762 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8ef3110-c308-43a3-a7b8-18d0f7d50488-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.741532 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8ef3110-c308-43a3-a7b8-18d0f7d50488-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.741786 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8ef3110-c308-43a3-a7b8-18d0f7d50488-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.744732 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45xld\" (UniqueName: \"kubernetes.io/projected/b8ef3110-c308-43a3-a7b8-18d0f7d50488-kube-api-access-45xld\") pod \"nova-cell0-conductor-0\" (UID: \"b8ef3110-c308-43a3-a7b8-18d0f7d50488\") " pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:31 crc kubenswrapper[4919]: I0930 20:33:31.790008 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:32 crc kubenswrapper[4919]: I0930 20:33:32.266406 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 30 20:33:32 crc kubenswrapper[4919]: I0930 20:33:32.362901 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b8ef3110-c308-43a3-a7b8-18d0f7d50488","Type":"ContainerStarted","Data":"325e21726b8aad88323df96cea18105a2e2210a41a4a1ca5212309c7f8e0be4b"} Sep 30 20:33:33 crc kubenswrapper[4919]: I0930 20:33:33.374981 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b8ef3110-c308-43a3-a7b8-18d0f7d50488","Type":"ContainerStarted","Data":"a8123eaa674b5ee80b96db8eaa97333ebacab262da96a37f4bf4137eb7950715"} Sep 30 20:33:33 crc kubenswrapper[4919]: I0930 20:33:33.375495 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:33 crc kubenswrapper[4919]: I0930 20:33:33.401568 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.401549279 podStartE2EDuration="2.401549279s" podCreationTimestamp="2025-09-30 20:33:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:33:33.39291918 +0000 UTC m=+1198.508952317" watchObservedRunningTime="2025-09-30 20:33:33.401549279 +0000 UTC m=+1198.517582416" Sep 30 20:33:36 crc kubenswrapper[4919]: I0930 20:33:36.931350 4919 scope.go:117] "RemoveContainer" containerID="04ce7d1265f3546484be0e8f022e5ce65067446506b43e794ef382cb0df03032" Sep 30 20:33:36 crc kubenswrapper[4919]: I0930 20:33:36.983266 4919 scope.go:117] "RemoveContainer" containerID="90af94a93598a67e4a7064f64298d4f282c4977d2a4ed9a6a29510fa7360150f" Sep 30 20:33:41 crc kubenswrapper[4919]: I0930 20:33:41.842168 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.422796 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-w646s"] Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.424602 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w646s" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.428882 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.435464 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-w646s"] Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.436433 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.514419 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.561204 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-scripts\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.561498 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.561689 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-config-data\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.561762 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvh7h\" (UniqueName: \"kubernetes.io/projected/222832c8-8bfd-460e-ae09-5594896b36fc-kube-api-access-rvh7h\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.604874 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.606733 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.608552 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.632454 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.648630 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.650286 4919 util.go:30] "No sandbox for pod can be found. 
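The pod_startup_latency_tracker entry above records how the kubelet measures startup: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that same span with the image-pull window (lastFinishedPulling minus firstStartedPulling) subtracted. For nova-cell0-conductor-0 both pull timestamps are the zero value "0001-01-01", so the two durations are equal (2.401549279s). A small Go sketch checking the arithmetic against the nova-scheduler-0 entry that appears further down in this log, where a real pull window exists (the values are copied from that entry; the program itself is illustrative):

// slo-check.go — verify podStartSLOduration = E2E duration - image-pull window.
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		// Layout matching the timestamps as they are printed in these entries.
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-09-30 20:33:42 +0000 UTC")
	firstPull := parse("2025-09-30 20:33:44.019303037 +0000 UTC")
	lastPull := parse("2025-09-30 20:33:47.196166976 +0000 UTC")
	running := parse("2025-09-30 20:33:47.641848894 +0000 UTC") // watchObservedRunningTime

	e2e := running.Sub(created)
	slo := e2e - lastPull.Sub(firstPull)
	fmt.Println("podStartE2EDuration:", e2e) // 5.641848894s, as logged
	fmt.Println("podStartSLOduration:", slo) // 2.464984955s, as logged
}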
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.653144 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.663571 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-scripts\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.663699 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.663825 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-config-data\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.663948 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvh7h\" (UniqueName: \"kubernetes.io/projected/222832c8-8bfd-460e-ae09-5594896b36fc-kube-api-access-rvh7h\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.676747 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-scripts\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.679708 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-config-data\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.688166 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.696451 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.700750 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvh7h\" (UniqueName: \"kubernetes.io/projected/222832c8-8bfd-460e-ae09-5594896b36fc-kube-api-access-rvh7h\") pod \"nova-cell0-cell-mapping-w646s\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.746229 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w646s"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.750304 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.751716 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.756625 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.765369 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-config-data\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.765690 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.765963 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.766081 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzkvz\" (UniqueName: \"kubernetes.io/projected/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-kube-api-access-xzkvz\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.766206 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.766357 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c36cbd0-636c-44e4-855a-9e407e31361e-logs\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.766480 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwvcj\" (UniqueName: \"kubernetes.io/projected/8c36cbd0-636c-44e4-855a-9e407e31361e-kube-api-access-zwvcj\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.806726 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.808527 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.814693 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.818161 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.839207 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.874137 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-config-data\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.874998 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkrbq\" (UniqueName: \"kubernetes.io/projected/52639b1d-3091-4729-acb6-cdc172e62912-kube-api-access-vkrbq\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.875115 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-config-data\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.875410 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.875573 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.875926 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzkvz\" (UniqueName: \"kubernetes.io/projected/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-kube-api-access-xzkvz\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.876044 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.876117 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c36cbd0-636c-44e4-855a-9e407e31361e-logs\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.876192 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwvcj\" (UniqueName: \"kubernetes.io/projected/8c36cbd0-636c-44e4-855a-9e407e31361e-kube-api-access-zwvcj\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.876313 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52639b1d-3091-4729-acb6-cdc172e62912-logs\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.876388 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.877194 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c36cbd0-636c-44e4-855a-9e407e31361e-logs\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.883666 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-config-data\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.885912 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.886487 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.908880 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.921725 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwvcj\" (UniqueName: \"kubernetes.io/projected/8c36cbd0-636c-44e4-855a-9e407e31361e-kube-api-access-zwvcj\") pod \"nova-api-0\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.922017 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzkvz\" (UniqueName: \"kubernetes.io/projected/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-kube-api-access-xzkvz\") pod \"nova-cell1-novncproxy-0\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") " pod="openstack/nova-cell1-novncproxy-0"
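Every VerifyControllerAttachedVolume/MountVolume pair above corresponds to a volume declared in the pod's spec; the plugin in the UniqueName (kubernetes.io/secret, kubernetes.io/projected, kubernetes.io/empty-dir) matches the volume source type. An illustrative client-go sketch that lists those declared volumes for nova-api-0 — the kubeconfig path and the assumption that it works outside the cluster are mine, not from the log:

// list-volumes.go — print the volumes a pod declares in its spec.
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumes a local kubeconfig (~/.kube/config); inside a pod you would use rest.InClusterConfig.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	pod, err := cs.CoreV1().Pods("openstack").Get(context.TODO(), "nova-api-0", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, v := range pod.Spec.Volumes {
		// Each of these produces one attach-verify/mount pair in the kubelet log.
		fmt.Printf("%s: %+v\n", v.Name, v.VolumeSource)
	}
}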
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.929697 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.936550 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-4gvsn"]
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.938616 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.943844 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-4gvsn"]
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.978112 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.978208 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scskv\" (UniqueName: \"kubernetes.io/projected/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-kube-api-access-scskv\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.978281 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52639b1d-3091-4729-acb6-cdc172e62912-logs\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.978301 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-config-data\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.978319 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.978349 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkrbq\" (UniqueName: \"kubernetes.io/projected/52639b1d-3091-4729-acb6-cdc172e62912-kube-api-access-vkrbq\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.978381 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-config-data\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.982610 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52639b1d-3091-4729-acb6-cdc172e62912-logs\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.986448 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-config-data\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:42 crc kubenswrapper[4919]: I0930 20:33:42.995918 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.006502 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkrbq\" (UniqueName: \"kubernetes.io/projected/52639b1d-3091-4729-acb6-cdc172e62912-kube-api-access-vkrbq\") pod \"nova-metadata-0\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " pod="openstack/nova-metadata-0"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.059983 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.081988 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scskv\" (UniqueName: \"kubernetes.io/projected/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-kube-api-access-scskv\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.082086 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwqp8\" (UniqueName: \"kubernetes.io/projected/df423b76-458b-49a2-94e0-cf51312f09a6-kube-api-access-pwqp8\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.082131 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-config-data\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.082193 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-config\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.082226 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.082250 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
\"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.082268 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.082296 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.082321 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.087984 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-config-data\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.088029 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.100903 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scskv\" (UniqueName: \"kubernetes.io/projected/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-kube-api-access-scskv\") pod \"nova-scheduler-0\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " pod="openstack/nova-scheduler-0" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.178675 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.183416 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwqp8\" (UniqueName: \"kubernetes.io/projected/df423b76-458b-49a2-94e0-cf51312f09a6-kube-api-access-pwqp8\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.183507 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-config\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.183526 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.183548 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.183571 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.183597 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.184449 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-sb\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.185622 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-config\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.187263 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-svc\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 
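The "SyncLoop ADD" and "SyncLoop UPDATE" entries with source="api" running through this section are the kubelet's sync loop reacting to pod changes it watches on the API server. A minimal client-go sketch (assumed kubeconfig; not taken from the log) that observes the same stream of changes for the openstack namespace from the client side:

// watch-pods.go — watch pod Added/Modified/Deleted events, the client-side
// mirror of the kubelet's "SyncLoop ADD"/"SyncLoop UPDATE"/"SyncLoop DELETE".
package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	w, err := cs.CoreV1().Pods("openstack").Watch(context.TODO(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	defer w.Stop()
	for ev := range w.ResultChan() {
		pod, ok := ev.Object.(*corev1.Pod)
		if !ok {
			continue
		}
		fmt.Printf("%s %s/%s phase=%s\n", ev.Type, pod.Namespace, pod.Name, pod.Status.Phase)
	}
}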
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.187343 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-swift-storage-0\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.187800 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-nb\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.201416 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwqp8\" (UniqueName: \"kubernetes.io/projected/df423b76-458b-49a2-94e0-cf51312f09a6-kube-api-access-pwqp8\") pod \"dnsmasq-dns-845d6d6f59-4gvsn\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") " pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.235467 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.272987 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.344347 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-w646s"]
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.503809 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w646s" event={"ID":"222832c8-8bfd-460e-ae09-5594896b36fc","Type":"ContainerStarted","Data":"758f177e5e7083ce0b6b3a31c4c35bbb59bb8f681cc62bdf6d6b97c6aabb1ffb"}
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.509478 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 20:33:43 crc kubenswrapper[4919]: W0930 20:33:43.515192 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c36cbd0_636c_44e4_855a_9e407e31361e.slice/crio-dbf53af56ddcea2579a6f49d663ad94041b4e77eb791349a74018f5037003e51 WatchSource:0}: Error finding container dbf53af56ddcea2579a6f49d663ad94041b4e77eb791349a74018f5037003e51: Status 404 returned error can't find the container with id dbf53af56ddcea2579a6f49d663ad94041b4e77eb791349a74018f5037003e51
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.525510 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.583205 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-54fj2"]
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.588002 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-54fj2"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.593800 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-54fj2"]
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.594064 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.594064 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Sep 30 20:33:43 crc kubenswrapper[4919]: W0930 20:33:43.621646 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbd6a165_4eb6_4758_831e_01f3ef8f0b30.slice/crio-a3f238e9b564752622e442dd68c2cd06faa110ff4ddaf35d407ec0402072b092 WatchSource:0}: Error finding container a3f238e9b564752622e442dd68c2cd06faa110ff4ddaf35d407ec0402072b092: Status 404 returned error can't find the container with id a3f238e9b564752622e442dd68c2cd06faa110ff4ddaf35d407ec0402072b092
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.622776 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.695841 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-scripts\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.696145 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cv5tn\" (UniqueName: \"kubernetes.io/projected/1a83d10d-4a42-4177-b227-0da1b675c06b-kube-api-access-cv5tn\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.696210 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-config-data\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.698754 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.805736 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-config-data\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2"
Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.805842 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2"
\"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.805913 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-scripts\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.805941 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cv5tn\" (UniqueName: \"kubernetes.io/projected/1a83d10d-4a42-4177-b227-0da1b675c06b-kube-api-access-cv5tn\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.813970 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-config-data\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.814529 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-scripts\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.830919 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cv5tn\" (UniqueName: \"kubernetes.io/projected/1a83d10d-4a42-4177-b227-0da1b675c06b-kube-api-access-cv5tn\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.832130 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-54fj2\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.880467 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.922145 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:43 crc kubenswrapper[4919]: I0930 20:33:43.945298 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.018754 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-4gvsn"] Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.298715 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-54fj2"] Sep 30 20:33:44 crc kubenswrapper[4919]: W0930 20:33:44.373428 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a83d10d_4a42_4177_b227_0da1b675c06b.slice/crio-52424be9789b974c4c772a9216bcff92b9ada31eb8db067099bb6ff0ebfae0c5 WatchSource:0}: Error finding container 52424be9789b974c4c772a9216bcff92b9ada31eb8db067099bb6ff0ebfae0c5: Status 404 returned error can't find the container with id 52424be9789b974c4c772a9216bcff92b9ada31eb8db067099bb6ff0ebfae0c5 Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.518357 4919 generic.go:334] "Generic (PLEG): container finished" podID="df423b76-458b-49a2-94e0-cf51312f09a6" containerID="dab6eff410e1122f1e9ba38e4079c77326c7e04f9bebfddcd0de72b1b18713f5" exitCode=0 Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.518556 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" event={"ID":"df423b76-458b-49a2-94e0-cf51312f09a6","Type":"ContainerDied","Data":"dab6eff410e1122f1e9ba38e4079c77326c7e04f9bebfddcd0de72b1b18713f5"} Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.518807 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" event={"ID":"df423b76-458b-49a2-94e0-cf51312f09a6","Type":"ContainerStarted","Data":"d6a55336300d68c11dfb32508f993aedfc573f2141147eb77605a4540131bf3d"} Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.528747 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w646s" event={"ID":"222832c8-8bfd-460e-ae09-5594896b36fc","Type":"ContainerStarted","Data":"5868f30b7f6ec022bfb9b8c885b8347dd98ab0699e4f6389db1ced28b11c70d5"} Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.531313 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52639b1d-3091-4729-acb6-cdc172e62912","Type":"ContainerStarted","Data":"c36a52fc76fa9e1c83aabbef7cd253d41609ae3a5b4be23eebe2e0d8a120fba5"} Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.534903 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-54fj2" event={"ID":"1a83d10d-4a42-4177-b227-0da1b675c06b","Type":"ContainerStarted","Data":"52424be9789b974c4c772a9216bcff92b9ada31eb8db067099bb6ff0ebfae0c5"} Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.543433 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1943a483-a0f9-4cb9-a16d-7c7acd604f6e","Type":"ContainerStarted","Data":"1506438a89cadb6479043bafc079d56497f4f5f5ce8f9eb177b9e9995fa0fdda"} Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.553786 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dbd6a165-4eb6-4758-831e-01f3ef8f0b30","Type":"ContainerStarted","Data":"a3f238e9b564752622e442dd68c2cd06faa110ff4ddaf35d407ec0402072b092"} Sep 30 20:33:44 
Sep 30 20:33:44 crc kubenswrapper[4919]: I0930 20:33:44.568975 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c36cbd0-636c-44e4-855a-9e407e31361e","Type":"ContainerStarted","Data":"dbf53af56ddcea2579a6f49d663ad94041b4e77eb791349a74018f5037003e51"}
Sep 30 20:33:45 crc kubenswrapper[4919]: I0930 20:33:45.578315 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-54fj2" event={"ID":"1a83d10d-4a42-4177-b227-0da1b675c06b","Type":"ContainerStarted","Data":"f75d695c2a138e615cd32ddb4cf978d927b46768cd76449d794758368125c09d"}
Sep 30 20:33:45 crc kubenswrapper[4919]: I0930 20:33:45.587543 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" event={"ID":"df423b76-458b-49a2-94e0-cf51312f09a6","Type":"ContainerStarted","Data":"5450878e4e9cde6fc7f510d20eab70bd7846d82a764214f69466f5d6fa3aefb8"}
Sep 30 20:33:45 crc kubenswrapper[4919]: I0930 20:33:45.588283 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:33:45 crc kubenswrapper[4919]: I0930 20:33:45.601079 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-w646s" podStartSLOduration=3.601056314 podStartE2EDuration="3.601056314s" podCreationTimestamp="2025-09-30 20:33:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:33:44.568687414 +0000 UTC m=+1209.684720531" watchObservedRunningTime="2025-09-30 20:33:45.601056314 +0000 UTC m=+1210.717089441"
Sep 30 20:33:45 crc kubenswrapper[4919]: I0930 20:33:45.602619 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-54fj2" podStartSLOduration=2.602601719 podStartE2EDuration="2.602601719s" podCreationTimestamp="2025-09-30 20:33:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:33:45.59916361 +0000 UTC m=+1210.715196737" watchObservedRunningTime="2025-09-30 20:33:45.602601719 +0000 UTC m=+1210.718634846"
Sep 30 20:33:45 crc kubenswrapper[4919]: I0930 20:33:45.641510 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" podStartSLOduration=3.6414919 podStartE2EDuration="3.6414919s" podCreationTimestamp="2025-09-30 20:33:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:33:45.632409368 +0000 UTC m=+1210.748442495" watchObservedRunningTime="2025-09-30 20:33:45.6414919 +0000 UTC m=+1210.757525027"
Sep 30 20:33:45 crc kubenswrapper[4919]: I0930 20:33:45.991047 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:33:45 crc kubenswrapper[4919]: I0930 20:33:45.997748 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 20:33:47 crc kubenswrapper[4919]: I0930 20:33:47.622387 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1943a483-a0f9-4cb9-a16d-7c7acd604f6e","Type":"ContainerStarted","Data":"eb320233a3ec927d907f535e56af8a6eb69b453cbb9fae1dde91666e030ab03f"}
Sep 30 20:33:47 crc kubenswrapper[4919]: I0930 20:33:47.627128 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c36cbd0-636c-44e4-855a-9e407e31361e","Type":"ContainerStarted","Data":"5248be8c9678cf75b910435823393b8144144bb14a7bb5e53bf3c0614d490337"}
Sep 30 20:33:47 crc kubenswrapper[4919]: I0930 20:33:47.629715 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52639b1d-3091-4729-acb6-cdc172e62912","Type":"ContainerStarted","Data":"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88"}
Sep 30 20:33:47 crc kubenswrapper[4919]: I0930 20:33:47.632592 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="dbd6a165-4eb6-4758-831e-01f3ef8f0b30" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37" gracePeriod=30
Sep 30 20:33:47 crc kubenswrapper[4919]: I0930 20:33:47.641870 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.464984955 podStartE2EDuration="5.641848894s" podCreationTimestamp="2025-09-30 20:33:42 +0000 UTC" firstStartedPulling="2025-09-30 20:33:44.019303037 +0000 UTC m=+1209.135336164" lastFinishedPulling="2025-09-30 20:33:47.196166976 +0000 UTC m=+1212.312200103" observedRunningTime="2025-09-30 20:33:47.639844296 +0000 UTC m=+1212.755877423" watchObservedRunningTime="2025-09-30 20:33:47.641848894 +0000 UTC m=+1212.757882021"
Sep 30 20:33:47 crc kubenswrapper[4919]: I0930 20:33:47.646138 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dbd6a165-4eb6-4758-831e-01f3ef8f0b30","Type":"ContainerStarted","Data":"f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37"}
Sep 30 20:33:47 crc kubenswrapper[4919]: I0930 20:33:47.659823 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.091336433 podStartE2EDuration="5.659804291s" podCreationTimestamp="2025-09-30 20:33:42 +0000 UTC" firstStartedPulling="2025-09-30 20:33:43.62395362 +0000 UTC m=+1208.739986747" lastFinishedPulling="2025-09-30 20:33:47.192421478 +0000 UTC m=+1212.308454605" observedRunningTime="2025-09-30 20:33:47.656417494 +0000 UTC m=+1212.772450621" watchObservedRunningTime="2025-09-30 20:33:47.659804291 +0000 UTC m=+1212.775837418"
Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.061186 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.236325 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.651005 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c36cbd0-636c-44e4-855a-9e407e31361e","Type":"ContainerStarted","Data":"8c7ff0a363b28206ddd30ee3a19f012450886a5f4f51516ef599d1e45f754eb8"}
Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.655273 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="52639b1d-3091-4729-acb6-cdc172e62912" containerName="nova-metadata-log" containerID="cri-o://6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88" gracePeriod=30
Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.655387 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52639b1d-3091-4729-acb6-cdc172e62912","Type":"ContainerStarted","Data":"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548"}
event={"ID":"52639b1d-3091-4729-acb6-cdc172e62912","Type":"ContainerStarted","Data":"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548"} Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.655443 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="52639b1d-3091-4729-acb6-cdc172e62912" containerName="nova-metadata-metadata" containerID="cri-o://097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548" gracePeriod=30 Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.682493 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.016760981 podStartE2EDuration="6.682466192s" podCreationTimestamp="2025-09-30 20:33:42 +0000 UTC" firstStartedPulling="2025-09-30 20:33:43.525234624 +0000 UTC m=+1208.641267761" lastFinishedPulling="2025-09-30 20:33:47.190939845 +0000 UTC m=+1212.306972972" observedRunningTime="2025-09-30 20:33:48.676406437 +0000 UTC m=+1213.792439614" watchObservedRunningTime="2025-09-30 20:33:48.682466192 +0000 UTC m=+1213.798499329" Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.721304 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.43159905 podStartE2EDuration="6.721274081s" podCreationTimestamp="2025-09-30 20:33:42 +0000 UTC" firstStartedPulling="2025-09-30 20:33:43.904342153 +0000 UTC m=+1209.020375280" lastFinishedPulling="2025-09-30 20:33:47.194017184 +0000 UTC m=+1212.310050311" observedRunningTime="2025-09-30 20:33:48.707196885 +0000 UTC m=+1213.823230012" watchObservedRunningTime="2025-09-30 20:33:48.721274081 +0000 UTC m=+1213.837307208" Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.965290 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:33:48 crc kubenswrapper[4919]: I0930 20:33:48.965916 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="3643ae76-bfa6-4d35-94ad-fedfa85b1977" containerName="kube-state-metrics" containerID="cri-o://e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973" gracePeriod=30 Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.281005 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.393238 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-combined-ca-bundle\") pod \"52639b1d-3091-4729-acb6-cdc172e62912\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.393319 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkrbq\" (UniqueName: \"kubernetes.io/projected/52639b1d-3091-4729-acb6-cdc172e62912-kube-api-access-vkrbq\") pod \"52639b1d-3091-4729-acb6-cdc172e62912\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.393420 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-config-data\") pod \"52639b1d-3091-4729-acb6-cdc172e62912\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.393547 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52639b1d-3091-4729-acb6-cdc172e62912-logs\") pod \"52639b1d-3091-4729-acb6-cdc172e62912\" (UID: \"52639b1d-3091-4729-acb6-cdc172e62912\") " Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.394258 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52639b1d-3091-4729-acb6-cdc172e62912-logs" (OuterVolumeSpecName: "logs") pod "52639b1d-3091-4729-acb6-cdc172e62912" (UID: "52639b1d-3091-4729-acb6-cdc172e62912"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.406868 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52639b1d-3091-4729-acb6-cdc172e62912-kube-api-access-vkrbq" (OuterVolumeSpecName: "kube-api-access-vkrbq") pod "52639b1d-3091-4729-acb6-cdc172e62912" (UID: "52639b1d-3091-4729-acb6-cdc172e62912"). InnerVolumeSpecName "kube-api-access-vkrbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.425692 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52639b1d-3091-4729-acb6-cdc172e62912" (UID: "52639b1d-3091-4729-acb6-cdc172e62912"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.441180 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-config-data" (OuterVolumeSpecName: "config-data") pod "52639b1d-3091-4729-acb6-cdc172e62912" (UID: "52639b1d-3091-4729-acb6-cdc172e62912"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.495585 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.495620 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkrbq\" (UniqueName: \"kubernetes.io/projected/52639b1d-3091-4729-acb6-cdc172e62912-kube-api-access-vkrbq\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.495632 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52639b1d-3091-4729-acb6-cdc172e62912-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.495641 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52639b1d-3091-4729-acb6-cdc172e62912-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.514382 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.598464 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-768b9\" (UniqueName: \"kubernetes.io/projected/3643ae76-bfa6-4d35-94ad-fedfa85b1977-kube-api-access-768b9\") pod \"3643ae76-bfa6-4d35-94ad-fedfa85b1977\" (UID: \"3643ae76-bfa6-4d35-94ad-fedfa85b1977\") " Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.602118 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3643ae76-bfa6-4d35-94ad-fedfa85b1977-kube-api-access-768b9" (OuterVolumeSpecName: "kube-api-access-768b9") pod "3643ae76-bfa6-4d35-94ad-fedfa85b1977" (UID: "3643ae76-bfa6-4d35-94ad-fedfa85b1977"). InnerVolumeSpecName "kube-api-access-768b9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.691410 4919 generic.go:334] "Generic (PLEG): container finished" podID="52639b1d-3091-4729-acb6-cdc172e62912" containerID="097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548" exitCode=0 Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.691448 4919 generic.go:334] "Generic (PLEG): container finished" podID="52639b1d-3091-4729-acb6-cdc172e62912" containerID="6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88" exitCode=143 Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.691505 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52639b1d-3091-4729-acb6-cdc172e62912","Type":"ContainerDied","Data":"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548"} Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.691536 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52639b1d-3091-4729-acb6-cdc172e62912","Type":"ContainerDied","Data":"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88"} Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.691549 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"52639b1d-3091-4729-acb6-cdc172e62912","Type":"ContainerDied","Data":"c36a52fc76fa9e1c83aabbef7cd253d41609ae3a5b4be23eebe2e0d8a120fba5"} Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.691566 4919 scope.go:117] "RemoveContainer" containerID="097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.691714 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.707926 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-768b9\" (UniqueName: \"kubernetes.io/projected/3643ae76-bfa6-4d35-94ad-fedfa85b1977-kube-api-access-768b9\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.711741 4919 generic.go:334] "Generic (PLEG): container finished" podID="3643ae76-bfa6-4d35-94ad-fedfa85b1977" containerID="e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973" exitCode=2 Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.712164 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.712569 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3643ae76-bfa6-4d35-94ad-fedfa85b1977","Type":"ContainerDied","Data":"e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973"} Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.712599 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3643ae76-bfa6-4d35-94ad-fedfa85b1977","Type":"ContainerDied","Data":"070181727a325421ea86c8fa88a344d157590d43b7ae3db06115ba269f9e32dd"} Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.732846 4919 scope.go:117] "RemoveContainer" containerID="6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.756823 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.775788 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.781492 4919 scope.go:117] "RemoveContainer" containerID="097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548" Sep 30 20:33:49 crc kubenswrapper[4919]: E0930 20:33:49.787917 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548\": container with ID starting with 097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548 not found: ID does not exist" containerID="097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.787963 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548"} err="failed to get container status \"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548\": rpc error: code = NotFound desc = could not find container \"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548\": container with ID starting with 097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548 not found: ID does not exist" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.787988 4919 scope.go:117] "RemoveContainer" containerID="6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.795223 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:33:49 crc kubenswrapper[4919]: E0930 20:33:49.797129 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88\": container with ID starting with 6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88 not found: ID does not exist" containerID="6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.797167 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88"} err="failed to get container status \"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88\": rpc error: code = NotFound 
desc = could not find container \"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88\": container with ID starting with 6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88 not found: ID does not exist" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.797193 4919 scope.go:117] "RemoveContainer" containerID="097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.797682 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548"} err="failed to get container status \"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548\": rpc error: code = NotFound desc = could not find container \"097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548\": container with ID starting with 097e346e3cdebb380da03ae5656a9a44104c16c0de1c3af6e9684bae27bcd548 not found: ID does not exist" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.797703 4919 scope.go:117] "RemoveContainer" containerID="6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.798833 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88"} err="failed to get container status \"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88\": rpc error: code = NotFound desc = could not find container \"6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88\": container with ID starting with 6307b4b535c2dbf21b95291c4ffa7c0acbe859c733ed5f92b0c87ac3a459ef88 not found: ID does not exist" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.798853 4919 scope.go:117] "RemoveContainer" containerID="e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.818391 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.837450 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:49 crc kubenswrapper[4919]: E0930 20:33:49.837853 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52639b1d-3091-4729-acb6-cdc172e62912" containerName="nova-metadata-log" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.837865 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="52639b1d-3091-4729-acb6-cdc172e62912" containerName="nova-metadata-log" Sep 30 20:33:49 crc kubenswrapper[4919]: E0930 20:33:49.837895 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52639b1d-3091-4729-acb6-cdc172e62912" containerName="nova-metadata-metadata" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.837901 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="52639b1d-3091-4729-acb6-cdc172e62912" containerName="nova-metadata-metadata" Sep 30 20:33:49 crc kubenswrapper[4919]: E0930 20:33:49.837922 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3643ae76-bfa6-4d35-94ad-fedfa85b1977" containerName="kube-state-metrics" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.837928 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3643ae76-bfa6-4d35-94ad-fedfa85b1977" containerName="kube-state-metrics" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.838083 4919 
memory_manager.go:354] "RemoveStaleState removing state" podUID="52639b1d-3091-4729-acb6-cdc172e62912" containerName="nova-metadata-metadata" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.838105 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="52639b1d-3091-4729-acb6-cdc172e62912" containerName="nova-metadata-log" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.838120 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="3643ae76-bfa6-4d35-94ad-fedfa85b1977" containerName="kube-state-metrics" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.839002 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.842419 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.842640 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.857822 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.859906 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.864553 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.864744 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.867156 4919 scope.go:117] "RemoveContainer" containerID="e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973" Sep 30 20:33:49 crc kubenswrapper[4919]: E0930 20:33:49.869265 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973\": container with ID starting with e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973 not found: ID does not exist" containerID="e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.869296 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973"} err="failed to get container status \"e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973\": rpc error: code = NotFound desc = could not find container \"e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973\": container with ID starting with e8793360f1769c6ecd1450e053ec4dc2aa9d4d035ec6de8ecb6478f459c41973 not found: ID does not exist" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.878670 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.904117 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915039 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-config-data\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915094 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc7l7\" (UniqueName: \"kubernetes.io/projected/9cd34595-2de7-4140-acb0-4c1d24643d18-kube-api-access-hc7l7\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915126 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915153 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915177 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915234 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915268 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdv7b\" (UniqueName: \"kubernetes.io/projected/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-api-access-tdv7b\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915283 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9cd34595-2de7-4140-acb0-4c1d24643d18-logs\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:49 crc kubenswrapper[4919]: I0930 20:33:49.915318 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017125 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-config-data\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017173 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc7l7\" (UniqueName: \"kubernetes.io/projected/9cd34595-2de7-4140-acb0-4c1d24643d18-kube-api-access-hc7l7\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017226 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017255 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017279 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017318 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017350 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdv7b\" (UniqueName: \"kubernetes.io/projected/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-api-access-tdv7b\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017366 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9cd34595-2de7-4140-acb0-4c1d24643d18-logs\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.017399 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.018173 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9cd34595-2de7-4140-acb0-4c1d24643d18-logs\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 
20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.022462 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.023210 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.024567 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-config-data\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.025059 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.025262 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.025284 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.040262 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdv7b\" (UniqueName: \"kubernetes.io/projected/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-api-access-tdv7b\") pod \"kube-state-metrics-0\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.044408 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc7l7\" (UniqueName: \"kubernetes.io/projected/9cd34595-2de7-4140-acb0-4c1d24643d18-kube-api-access-hc7l7\") pod \"nova-metadata-0\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.204850 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.228800 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.742005 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:50 crc kubenswrapper[4919]: W0930 20:33:50.754571 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9cd34595_2de7_4140_acb0_4c1d24643d18.slice/crio-c3d111b1cd672584de71e3868da90a5285328d8df72c6cc19cddedfbafbbcff2 WatchSource:0}: Error finding container c3d111b1cd672584de71e3868da90a5285328d8df72c6cc19cddedfbafbbcff2: Status 404 returned error can't find the container with id c3d111b1cd672584de71e3868da90a5285328d8df72c6cc19cddedfbafbbcff2 Sep 30 20:33:50 crc kubenswrapper[4919]: I0930 20:33:50.847361 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 20:33:50 crc kubenswrapper[4919]: W0930 20:33:50.852070 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf763f18_f1aa_442d_aaac_a6b4353ce21b.slice/crio-1849cc5002c5d74d89f45b4ef2347343e2e06f08264ccd3ddebd2f4c657f2c62 WatchSource:0}: Error finding container 1849cc5002c5d74d89f45b4ef2347343e2e06f08264ccd3ddebd2f4c657f2c62: Status 404 returned error can't find the container with id 1849cc5002c5d74d89f45b4ef2347343e2e06f08264ccd3ddebd2f4c657f2c62 Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.647256 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3643ae76-bfa6-4d35-94ad-fedfa85b1977" path="/var/lib/kubelet/pods/3643ae76-bfa6-4d35-94ad-fedfa85b1977/volumes" Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.648903 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52639b1d-3091-4729-acb6-cdc172e62912" path="/var/lib/kubelet/pods/52639b1d-3091-4729-acb6-cdc172e62912/volumes" Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.740297 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9cd34595-2de7-4140-acb0-4c1d24643d18","Type":"ContainerStarted","Data":"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236"} Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.740387 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9cd34595-2de7-4140-acb0-4c1d24643d18","Type":"ContainerStarted","Data":"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee"} Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.740411 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9cd34595-2de7-4140-acb0-4c1d24643d18","Type":"ContainerStarted","Data":"c3d111b1cd672584de71e3868da90a5285328d8df72c6cc19cddedfbafbbcff2"} Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.748753 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"af763f18-f1aa-442d-aaac-a6b4353ce21b","Type":"ContainerStarted","Data":"65c27ca22994224f5341e4465b1ee63843a9e525bf4b7f7e811192722b1739a9"} Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.748834 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"af763f18-f1aa-442d-aaac-a6b4353ce21b","Type":"ContainerStarted","Data":"1849cc5002c5d74d89f45b4ef2347343e2e06f08264ccd3ddebd2f4c657f2c62"} Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.748863 4919 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.751653 4919 generic.go:334] "Generic (PLEG): container finished" podID="222832c8-8bfd-460e-ae09-5594896b36fc" containerID="5868f30b7f6ec022bfb9b8c885b8347dd98ab0699e4f6389db1ced28b11c70d5" exitCode=0 Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.751700 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w646s" event={"ID":"222832c8-8bfd-460e-ae09-5594896b36fc","Type":"ContainerDied","Data":"5868f30b7f6ec022bfb9b8c885b8347dd98ab0699e4f6389db1ced28b11c70d5"} Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.813819 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.814171 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="ceilometer-central-agent" containerID="cri-o://8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e" gracePeriod=30 Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.814195 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="sg-core" containerID="cri-o://dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486" gracePeriod=30 Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.814195 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="proxy-httpd" containerID="cri-o://985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc" gracePeriod=30 Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.814279 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="ceilometer-notification-agent" containerID="cri-o://3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1" gracePeriod=30 Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.830033 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.8300134359999998 podStartE2EDuration="2.830013436s" podCreationTimestamp="2025-09-30 20:33:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:33:51.772415606 +0000 UTC m=+1216.888448743" watchObservedRunningTime="2025-09-30 20:33:51.830013436 +0000 UTC m=+1216.946046563" Sep 30 20:33:51 crc kubenswrapper[4919]: I0930 20:33:51.845362 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.474956492 podStartE2EDuration="2.845339358s" podCreationTimestamp="2025-09-30 20:33:49 +0000 UTC" firstStartedPulling="2025-09-30 20:33:50.855086212 +0000 UTC m=+1215.971119339" lastFinishedPulling="2025-09-30 20:33:51.225469068 +0000 UTC m=+1216.341502205" observedRunningTime="2025-09-30 20:33:51.825425464 +0000 UTC m=+1216.941458591" watchObservedRunningTime="2025-09-30 20:33:51.845339358 +0000 UTC m=+1216.961372485" Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.766144 4919 generic.go:334] "Generic (PLEG): container finished" 
podID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerID="985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc" exitCode=0 Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.766508 4919 generic.go:334] "Generic (PLEG): container finished" podID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerID="dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486" exitCode=2 Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.766520 4919 generic.go:334] "Generic (PLEG): container finished" podID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerID="8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e" exitCode=0 Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.766316 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerDied","Data":"985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc"} Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.766593 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerDied","Data":"dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486"} Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.766634 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerDied","Data":"8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e"} Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.768570 4919 generic.go:334] "Generic (PLEG): container finished" podID="1a83d10d-4a42-4177-b227-0da1b675c06b" containerID="f75d695c2a138e615cd32ddb4cf978d927b46768cd76449d794758368125c09d" exitCode=0 Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.768611 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-54fj2" event={"ID":"1a83d10d-4a42-4177-b227-0da1b675c06b","Type":"ContainerDied","Data":"f75d695c2a138e615cd32ddb4cf978d927b46768cd76449d794758368125c09d"} Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.931535 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:33:52 crc kubenswrapper[4919]: I0930 20:33:52.931599 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.237110 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.275503 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.279073 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.331245 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w646s" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.337409 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-mj85v"] Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.337642 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" podUID="c6714658-0275-4c06-952f-d84e5121bd9d" containerName="dnsmasq-dns" containerID="cri-o://ed46622a300dba9708a1f29da97c0bbd2aff1b4b843944464929611b34488b87" gracePeriod=10 Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.390604 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvh7h\" (UniqueName: \"kubernetes.io/projected/222832c8-8bfd-460e-ae09-5594896b36fc-kube-api-access-rvh7h\") pod \"222832c8-8bfd-460e-ae09-5594896b36fc\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.390660 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-combined-ca-bundle\") pod \"222832c8-8bfd-460e-ae09-5594896b36fc\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.390724 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-scripts\") pod \"222832c8-8bfd-460e-ae09-5594896b36fc\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.390789 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-config-data\") pod \"222832c8-8bfd-460e-ae09-5594896b36fc\" (UID: \"222832c8-8bfd-460e-ae09-5594896b36fc\") " Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.396433 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-scripts" (OuterVolumeSpecName: "scripts") pod "222832c8-8bfd-460e-ae09-5594896b36fc" (UID: "222832c8-8bfd-460e-ae09-5594896b36fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.396521 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/222832c8-8bfd-460e-ae09-5594896b36fc-kube-api-access-rvh7h" (OuterVolumeSpecName: "kube-api-access-rvh7h") pod "222832c8-8bfd-460e-ae09-5594896b36fc" (UID: "222832c8-8bfd-460e-ae09-5594896b36fc"). InnerVolumeSpecName "kube-api-access-rvh7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.419332 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "222832c8-8bfd-460e-ae09-5594896b36fc" (UID: "222832c8-8bfd-460e-ae09-5594896b36fc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.435319 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-config-data" (OuterVolumeSpecName: "config-data") pod "222832c8-8bfd-460e-ae09-5594896b36fc" (UID: "222832c8-8bfd-460e-ae09-5594896b36fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.493317 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.493351 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvh7h\" (UniqueName: \"kubernetes.io/projected/222832c8-8bfd-460e-ae09-5594896b36fc-kube-api-access-rvh7h\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.493361 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.493371 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/222832c8-8bfd-460e-ae09-5594896b36fc-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.786721 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-w646s" event={"ID":"222832c8-8bfd-460e-ae09-5594896b36fc","Type":"ContainerDied","Data":"758f177e5e7083ce0b6b3a31c4c35bbb59bb8f681cc62bdf6d6b97c6aabb1ffb"} Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.786967 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="758f177e5e7083ce0b6b3a31c4c35bbb59bb8f681cc62bdf6d6b97c6aabb1ffb" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.787027 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-w646s" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.789193 4919 generic.go:334] "Generic (PLEG): container finished" podID="c6714658-0275-4c06-952f-d84e5121bd9d" containerID="ed46622a300dba9708a1f29da97c0bbd2aff1b4b843944464929611b34488b87" exitCode=0 Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.789362 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" event={"ID":"c6714658-0275-4c06-952f-d84e5121bd9d","Type":"ContainerDied","Data":"ed46622a300dba9708a1f29da97c0bbd2aff1b4b843944464929611b34488b87"} Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.789391 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" event={"ID":"c6714658-0275-4c06-952f-d84e5121bd9d","Type":"ContainerDied","Data":"5ae98cee30cfa8541b7e466602d796c5f4ba599af5dd9dc15d32c51f94c99835"} Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.789404 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ae98cee30cfa8541b7e466602d796c5f4ba599af5dd9dc15d32c51f94c99835" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.823984 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 20:33:53 crc kubenswrapper[4919]: I0930 20:33:53.910864 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.012157 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-sb\") pod \"c6714658-0275-4c06-952f-d84e5121bd9d\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.012229 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-svc\") pod \"c6714658-0275-4c06-952f-d84e5121bd9d\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.012254 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-config\") pod \"c6714658-0275-4c06-952f-d84e5121bd9d\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.012319 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g47kt\" (UniqueName: \"kubernetes.io/projected/c6714658-0275-4c06-952f-d84e5121bd9d-kube-api-access-g47kt\") pod \"c6714658-0275-4c06-952f-d84e5121bd9d\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.012355 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-swift-storage-0\") pod \"c6714658-0275-4c06-952f-d84e5121bd9d\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.012374 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-nb\") 
pod \"c6714658-0275-4c06-952f-d84e5121bd9d\" (UID: \"c6714658-0275-4c06-952f-d84e5121bd9d\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.018510 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.019158 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.024009 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.024226 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-log" containerID="cri-o://5248be8c9678cf75b910435823393b8144144bb14a7bb5e53bf3c0614d490337" gracePeriod=30 Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.024597 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-api" containerID="cri-o://8c7ff0a363b28206ddd30ee3a19f012450886a5f4f51516ef599d1e45f754eb8" gracePeriod=30 Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.043289 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6714658-0275-4c06-952f-d84e5121bd9d-kube-api-access-g47kt" (OuterVolumeSpecName: "kube-api-access-g47kt") pod "c6714658-0275-4c06-952f-d84e5121bd9d" (UID: "c6714658-0275-4c06-952f-d84e5121bd9d"). InnerVolumeSpecName "kube-api-access-g47kt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.050561 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.050892 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerName="nova-metadata-log" containerID="cri-o://e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee" gracePeriod=30 Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.051228 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerName="nova-metadata-metadata" containerID="cri-o://cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236" gracePeriod=30 Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.114846 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g47kt\" (UniqueName: \"kubernetes.io/projected/c6714658-0275-4c06-952f-d84e5121bd9d-kube-api-access-g47kt\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.121771 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c6714658-0275-4c06-952f-d84e5121bd9d" (UID: "c6714658-0275-4c06-952f-d84e5121bd9d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.122590 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.149410 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c6714658-0275-4c06-952f-d84e5121bd9d" (UID: "c6714658-0275-4c06-952f-d84e5121bd9d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.169824 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-config" (OuterVolumeSpecName: "config") pod "c6714658-0275-4c06-952f-d84e5121bd9d" (UID: "c6714658-0275-4c06-952f-d84e5121bd9d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.171883 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c6714658-0275-4c06-952f-d84e5121bd9d" (UID: "c6714658-0275-4c06-952f-d84e5121bd9d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.172970 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c6714658-0275-4c06-952f-d84e5121bd9d" (UID: "c6714658-0275-4c06-952f-d84e5121bd9d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.216649 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-combined-ca-bundle\") pod \"1a83d10d-4a42-4177-b227-0da1b675c06b\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.216724 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-config-data\") pod \"1a83d10d-4a42-4177-b227-0da1b675c06b\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.216766 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cv5tn\" (UniqueName: \"kubernetes.io/projected/1a83d10d-4a42-4177-b227-0da1b675c06b-kube-api-access-cv5tn\") pod \"1a83d10d-4a42-4177-b227-0da1b675c06b\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.216956 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-scripts\") pod \"1a83d10d-4a42-4177-b227-0da1b675c06b\" (UID: \"1a83d10d-4a42-4177-b227-0da1b675c06b\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.217420 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.217436 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.217445 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.217454 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.217463 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c6714658-0275-4c06-952f-d84e5121bd9d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.222003 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a83d10d-4a42-4177-b227-0da1b675c06b-kube-api-access-cv5tn" (OuterVolumeSpecName: "kube-api-access-cv5tn") pod "1a83d10d-4a42-4177-b227-0da1b675c06b" (UID: "1a83d10d-4a42-4177-b227-0da1b675c06b"). InnerVolumeSpecName "kube-api-access-cv5tn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.224580 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-scripts" (OuterVolumeSpecName: "scripts") pod "1a83d10d-4a42-4177-b227-0da1b675c06b" (UID: "1a83d10d-4a42-4177-b227-0da1b675c06b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.252857 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-config-data" (OuterVolumeSpecName: "config-data") pod "1a83d10d-4a42-4177-b227-0da1b675c06b" (UID: "1a83d10d-4a42-4177-b227-0da1b675c06b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.254107 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a83d10d-4a42-4177-b227-0da1b675c06b" (UID: "1a83d10d-4a42-4177-b227-0da1b675c06b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.324074 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.324107 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.324117 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cv5tn\" (UniqueName: \"kubernetes.io/projected/1a83d10d-4a42-4177-b227-0da1b675c06b-kube-api-access-cv5tn\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.324127 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a83d10d-4a42-4177-b227-0da1b675c06b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.354361 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.624088 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.730650 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-nova-metadata-tls-certs\") pod \"9cd34595-2de7-4140-acb0-4c1d24643d18\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.730717 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-combined-ca-bundle\") pod \"9cd34595-2de7-4140-acb0-4c1d24643d18\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.730804 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-config-data\") pod \"9cd34595-2de7-4140-acb0-4c1d24643d18\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.730860 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hc7l7\" (UniqueName: \"kubernetes.io/projected/9cd34595-2de7-4140-acb0-4c1d24643d18-kube-api-access-hc7l7\") pod \"9cd34595-2de7-4140-acb0-4c1d24643d18\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.730968 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9cd34595-2de7-4140-acb0-4c1d24643d18-logs\") pod \"9cd34595-2de7-4140-acb0-4c1d24643d18\" (UID: \"9cd34595-2de7-4140-acb0-4c1d24643d18\") " Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.733011 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cd34595-2de7-4140-acb0-4c1d24643d18-logs" (OuterVolumeSpecName: "logs") pod "9cd34595-2de7-4140-acb0-4c1d24643d18" (UID: "9cd34595-2de7-4140-acb0-4c1d24643d18"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.758284 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9cd34595-2de7-4140-acb0-4c1d24643d18" (UID: "9cd34595-2de7-4140-acb0-4c1d24643d18"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.760220 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cd34595-2de7-4140-acb0-4c1d24643d18-kube-api-access-hc7l7" (OuterVolumeSpecName: "kube-api-access-hc7l7") pod "9cd34595-2de7-4140-acb0-4c1d24643d18" (UID: "9cd34595-2de7-4140-acb0-4c1d24643d18"). InnerVolumeSpecName "kube-api-access-hc7l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.775883 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-config-data" (OuterVolumeSpecName: "config-data") pod "9cd34595-2de7-4140-acb0-4c1d24643d18" (UID: "9cd34595-2de7-4140-acb0-4c1d24643d18"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.818841 4919 generic.go:334] "Generic (PLEG): container finished" podID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerID="5248be8c9678cf75b910435823393b8144144bb14a7bb5e53bf3c0614d490337" exitCode=143 Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.818922 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c36cbd0-636c-44e4-855a-9e407e31361e","Type":"ContainerDied","Data":"5248be8c9678cf75b910435823393b8144144bb14a7bb5e53bf3c0614d490337"} Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.820503 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-54fj2" event={"ID":"1a83d10d-4a42-4177-b227-0da1b675c06b","Type":"ContainerDied","Data":"52424be9789b974c4c772a9216bcff92b9ada31eb8db067099bb6ff0ebfae0c5"} Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.820523 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52424be9789b974c4c772a9216bcff92b9ada31eb8db067099bb6ff0ebfae0c5" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.820672 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-54fj2" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.822793 4919 generic.go:334] "Generic (PLEG): container finished" podID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerID="cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236" exitCode=0 Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.822823 4919 generic.go:334] "Generic (PLEG): container finished" podID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerID="e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee" exitCode=143 Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.822914 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5784cf869f-mj85v" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.824366 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.824733 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9cd34595-2de7-4140-acb0-4c1d24643d18","Type":"ContainerDied","Data":"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236"} Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.831724 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9cd34595-2de7-4140-acb0-4c1d24643d18","Type":"ContainerDied","Data":"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee"} Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.831766 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9cd34595-2de7-4140-acb0-4c1d24643d18","Type":"ContainerDied","Data":"c3d111b1cd672584de71e3868da90a5285328d8df72c6cc19cddedfbafbbcff2"} Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.831807 4919 scope.go:117] "RemoveContainer" containerID="cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.836142 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.836420 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.836432 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hc7l7\" (UniqueName: \"kubernetes.io/projected/9cd34595-2de7-4140-acb0-4c1d24643d18-kube-api-access-hc7l7\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.836442 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9cd34595-2de7-4140-acb0-4c1d24643d18-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.842370 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "9cd34595-2de7-4140-acb0-4c1d24643d18" (UID: "9cd34595-2de7-4140-acb0-4c1d24643d18"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.868235 4919 scope.go:117] "RemoveContainer" containerID="e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.900406 4919 scope.go:117] "RemoveContainer" containerID="cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.907270 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 20:33:54 crc kubenswrapper[4919]: E0930 20:33:54.907723 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="222832c8-8bfd-460e-ae09-5594896b36fc" containerName="nova-manage" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.907738 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="222832c8-8bfd-460e-ae09-5594896b36fc" containerName="nova-manage" Sep 30 20:33:54 crc kubenswrapper[4919]: E0930 20:33:54.907761 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6714658-0275-4c06-952f-d84e5121bd9d" containerName="dnsmasq-dns" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.907767 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6714658-0275-4c06-952f-d84e5121bd9d" containerName="dnsmasq-dns" Sep 30 20:33:54 crc kubenswrapper[4919]: E0930 20:33:54.907777 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerName="nova-metadata-metadata" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.907784 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerName="nova-metadata-metadata" Sep 30 20:33:54 crc kubenswrapper[4919]: E0930 20:33:54.907794 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a83d10d-4a42-4177-b227-0da1b675c06b" containerName="nova-cell1-conductor-db-sync" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.907801 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a83d10d-4a42-4177-b227-0da1b675c06b" containerName="nova-cell1-conductor-db-sync" Sep 30 20:33:54 crc kubenswrapper[4919]: E0930 20:33:54.907814 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6714658-0275-4c06-952f-d84e5121bd9d" containerName="init" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.907821 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6714658-0275-4c06-952f-d84e5121bd9d" containerName="init" Sep 30 20:33:54 crc kubenswrapper[4919]: E0930 20:33:54.907832 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerName="nova-metadata-log" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.907839 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerName="nova-metadata-log" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.908011 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" containerName="nova-metadata-metadata" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.908022 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a83d10d-4a42-4177-b227-0da1b675c06b" containerName="nova-cell1-conductor-db-sync" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.908037 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" 
containerName="nova-metadata-log" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.908044 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6714658-0275-4c06-952f-d84e5121bd9d" containerName="dnsmasq-dns" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.908056 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="222832c8-8bfd-460e-ae09-5594896b36fc" containerName="nova-manage" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.908695 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:54 crc kubenswrapper[4919]: E0930 20:33:54.912705 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236\": container with ID starting with cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236 not found: ID does not exist" containerID="cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.912752 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236"} err="failed to get container status \"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236\": rpc error: code = NotFound desc = could not find container \"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236\": container with ID starting with cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236 not found: ID does not exist" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.912783 4919 scope.go:117] "RemoveContainer" containerID="e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.912986 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 30 20:33:54 crc kubenswrapper[4919]: E0930 20:33:54.913109 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee\": container with ID starting with e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee not found: ID does not exist" containerID="e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.913162 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee"} err="failed to get container status \"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee\": rpc error: code = NotFound desc = could not find container \"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee\": container with ID starting with e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee not found: ID does not exist" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.913180 4919 scope.go:117] "RemoveContainer" containerID="cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.918239 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.920365 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-5784cf869f-mj85v"] Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.922345 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236"} err="failed to get container status \"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236\": rpc error: code = NotFound desc = could not find container \"cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236\": container with ID starting with cd3f0951013773bfb6f1cd7c061f5f1f09ba9f33ca6e9a3e4164fe50f5b9a236 not found: ID does not exist" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.922396 4919 scope.go:117] "RemoveContainer" containerID="e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.922894 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee"} err="failed to get container status \"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee\": rpc error: code = NotFound desc = could not find container \"e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee\": container with ID starting with e6954192c0eea2f8315c36fe8af7dcbd642becee665a2c4ce540b6d5f3557dee not found: ID does not exist" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.927077 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5784cf869f-mj85v"] Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.937846 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjvg8\" (UniqueName: \"kubernetes.io/projected/6abf178b-97a5-488a-a31e-7af9a7c0a710-kube-api-access-sjvg8\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.938062 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6abf178b-97a5-488a-a31e-7af9a7c0a710-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.938173 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6abf178b-97a5-488a-a31e-7af9a7c0a710-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:54 crc kubenswrapper[4919]: I0930 20:33:54.938396 4919 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9cd34595-2de7-4140-acb0-4c1d24643d18-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.039453 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6abf178b-97a5-488a-a31e-7af9a7c0a710-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.039567 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-sjvg8\" (UniqueName: \"kubernetes.io/projected/6abf178b-97a5-488a-a31e-7af9a7c0a710-kube-api-access-sjvg8\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.039629 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6abf178b-97a5-488a-a31e-7af9a7c0a710-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.044201 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6abf178b-97a5-488a-a31e-7af9a7c0a710-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.044515 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6abf178b-97a5-488a-a31e-7af9a7c0a710-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.054655 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjvg8\" (UniqueName: \"kubernetes.io/projected/6abf178b-97a5-488a-a31e-7af9a7c0a710-kube-api-access-sjvg8\") pod \"nova-cell1-conductor-0\" (UID: \"6abf178b-97a5-488a-a31e-7af9a7c0a710\") " pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.164387 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.174404 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.186997 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.188429 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.191998 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.192425 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.222862 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.240191 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.245488 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-config-data\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.245534 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g44f\" (UniqueName: \"kubernetes.io/projected/c335a9c2-3185-4ade-8048-c2dc570a2961-kube-api-access-4g44f\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.245553 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c335a9c2-3185-4ade-8048-c2dc570a2961-logs\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.245602 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.245631 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.347058 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.347101 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.347229 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-config-data\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.347249 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g44f\" (UniqueName: \"kubernetes.io/projected/c335a9c2-3185-4ade-8048-c2dc570a2961-kube-api-access-4g44f\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.347263 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c335a9c2-3185-4ade-8048-c2dc570a2961-logs\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.347631 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c335a9c2-3185-4ade-8048-c2dc570a2961-logs\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.351322 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.359796 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-config-data\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.360845 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.375538 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g44f\" (UniqueName: \"kubernetes.io/projected/c335a9c2-3185-4ade-8048-c2dc570a2961-kube-api-access-4g44f\") pod \"nova-metadata-0\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") " pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.514169 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.663643 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cd34595-2de7-4140-acb0-4c1d24643d18" path="/var/lib/kubelet/pods/9cd34595-2de7-4140-acb0-4c1d24643d18/volumes" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.664453 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6714658-0275-4c06-952f-d84e5121bd9d" path="/var/lib/kubelet/pods/c6714658-0275-4c06-952f-d84e5121bd9d/volumes" Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.677507 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.835252 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6abf178b-97a5-488a-a31e-7af9a7c0a710","Type":"ContainerStarted","Data":"6dfcc33c8cbf39b3cf8e7e9f7535841628dc460f19d53058618d2bec8a400d0f"} Sep 30 20:33:55 crc kubenswrapper[4919]: I0930 20:33:55.835352 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="1943a483-a0f9-4cb9-a16d-7c7acd604f6e" containerName="nova-scheduler-scheduler" containerID="cri-o://eb320233a3ec927d907f535e56af8a6eb69b453cbb9fae1dde91666e030ab03f" gracePeriod=30 Sep 30 20:33:56 crc kubenswrapper[4919]: I0930 20:33:56.048779 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:33:56 crc kubenswrapper[4919]: W0930 20:33:56.052451 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc335a9c2_3185_4ade_8048_c2dc570a2961.slice/crio-472ff519998969de9f8921f801d551d5b645511d9812fad24b0ecaec0b09a9a2 WatchSource:0}: Error finding container 472ff519998969de9f8921f801d551d5b645511d9812fad24b0ecaec0b09a9a2: Status 404 returned error can't find the container with id 472ff519998969de9f8921f801d551d5b645511d9812fad24b0ecaec0b09a9a2 Sep 30 20:33:56 crc kubenswrapper[4919]: I0930 20:33:56.846466 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c335a9c2-3185-4ade-8048-c2dc570a2961","Type":"ContainerStarted","Data":"585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628"} Sep 30 20:33:56 crc kubenswrapper[4919]: I0930 20:33:56.846743 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c335a9c2-3185-4ade-8048-c2dc570a2961","Type":"ContainerStarted","Data":"769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2"} Sep 30 20:33:56 crc kubenswrapper[4919]: I0930 20:33:56.846754 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c335a9c2-3185-4ade-8048-c2dc570a2961","Type":"ContainerStarted","Data":"472ff519998969de9f8921f801d551d5b645511d9812fad24b0ecaec0b09a9a2"} Sep 30 20:33:56 crc kubenswrapper[4919]: I0930 20:33:56.861975 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"6abf178b-97a5-488a-a31e-7af9a7c0a710","Type":"ContainerStarted","Data":"214bee5fe3c634c6f2cc44b2ac578c962a6bde4c020a9840e3370048b081ecee"} Sep 30 20:33:56 crc kubenswrapper[4919]: I0930 20:33:56.863044 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 30 20:33:56 crc kubenswrapper[4919]: I0930 20:33:56.885053 4919 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.885030937 podStartE2EDuration="1.885030937s" podCreationTimestamp="2025-09-30 20:33:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:33:56.873486165 +0000 UTC m=+1221.989519312" watchObservedRunningTime="2025-09-30 20:33:56.885030937 +0000 UTC m=+1222.001064064" Sep 30 20:33:56 crc kubenswrapper[4919]: I0930 20:33:56.904960 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.904936781 podStartE2EDuration="2.904936781s" podCreationTimestamp="2025-09-30 20:33:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:33:56.895584082 +0000 UTC m=+1222.011617209" watchObservedRunningTime="2025-09-30 20:33:56.904936781 +0000 UTC m=+1222.020969918" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.302505 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.411420 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-combined-ca-bundle\") pod \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.411504 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-run-httpd\") pod \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.411549 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-scripts\") pod \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.411600 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-sg-core-conf-yaml\") pod \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.411680 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwhcr\" (UniqueName: \"kubernetes.io/projected/9b1898ef-3ec1-4e26-9332-a1402f89e08e-kube-api-access-xwhcr\") pod \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.411783 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-config-data\") pod \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.411833 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-log-httpd\") pod \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\" (UID: \"9b1898ef-3ec1-4e26-9332-a1402f89e08e\") " Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.412889 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9b1898ef-3ec1-4e26-9332-a1402f89e08e" (UID: "9b1898ef-3ec1-4e26-9332-a1402f89e08e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.413986 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9b1898ef-3ec1-4e26-9332-a1402f89e08e" (UID: "9b1898ef-3ec1-4e26-9332-a1402f89e08e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.415812 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-scripts" (OuterVolumeSpecName: "scripts") pod "9b1898ef-3ec1-4e26-9332-a1402f89e08e" (UID: "9b1898ef-3ec1-4e26-9332-a1402f89e08e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.419323 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b1898ef-3ec1-4e26-9332-a1402f89e08e-kube-api-access-xwhcr" (OuterVolumeSpecName: "kube-api-access-xwhcr") pod "9b1898ef-3ec1-4e26-9332-a1402f89e08e" (UID: "9b1898ef-3ec1-4e26-9332-a1402f89e08e"). InnerVolumeSpecName "kube-api-access-xwhcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.437387 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9b1898ef-3ec1-4e26-9332-a1402f89e08e" (UID: "9b1898ef-3ec1-4e26-9332-a1402f89e08e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.515376 4919 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.515412 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwhcr\" (UniqueName: \"kubernetes.io/projected/9b1898ef-3ec1-4e26-9332-a1402f89e08e-kube-api-access-xwhcr\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.515424 4919 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.515432 4919 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b1898ef-3ec1-4e26-9332-a1402f89e08e-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.515440 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.532584 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b1898ef-3ec1-4e26-9332-a1402f89e08e" (UID: "9b1898ef-3ec1-4e26-9332-a1402f89e08e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.540053 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-config-data" (OuterVolumeSpecName: "config-data") pod "9b1898ef-3ec1-4e26-9332-a1402f89e08e" (UID: "9b1898ef-3ec1-4e26-9332-a1402f89e08e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.616807 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.616850 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b1898ef-3ec1-4e26-9332-a1402f89e08e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.874699 4919 generic.go:334] "Generic (PLEG): container finished" podID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerID="3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1" exitCode=0 Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.874766 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.874760 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerDied","Data":"3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1"} Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.874969 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9b1898ef-3ec1-4e26-9332-a1402f89e08e","Type":"ContainerDied","Data":"ab06966c29690897bf0456896b7eb28dacee10e4acdd7e787af8b00ff8dd398c"} Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.874995 4919 scope.go:117] "RemoveContainer" containerID="985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.901962 4919 scope.go:117] "RemoveContainer" containerID="dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.917893 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.936480 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.957820 4919 scope.go:117] "RemoveContainer" containerID="3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.962920 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:57 crc kubenswrapper[4919]: E0930 20:33:57.963404 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="ceilometer-notification-agent" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.963420 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="ceilometer-notification-agent" Sep 30 20:33:57 crc kubenswrapper[4919]: E0930 20:33:57.963440 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="sg-core" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.963449 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="sg-core" Sep 30 20:33:57 crc kubenswrapper[4919]: E0930 20:33:57.963477 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="ceilometer-central-agent" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.963485 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="ceilometer-central-agent" Sep 30 20:33:57 crc kubenswrapper[4919]: E0930 20:33:57.963513 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="proxy-httpd" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.963523 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="proxy-httpd" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.963790 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="ceilometer-central-agent" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.963810 4919 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="ceilometer-notification-agent" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.963824 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="proxy-httpd" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.963843 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" containerName="sg-core" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.966192 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.974366 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.979465 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.979639 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.980517 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:33:57 crc kubenswrapper[4919]: I0930 20:33:57.994520 4919 scope.go:117] "RemoveContainer" containerID="8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.020487 4919 scope.go:117] "RemoveContainer" containerID="985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc" Sep 30 20:33:58 crc kubenswrapper[4919]: E0930 20:33:58.021271 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc\": container with ID starting with 985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc not found: ID does not exist" containerID="985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.021308 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc"} err="failed to get container status \"985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc\": rpc error: code = NotFound desc = could not find container \"985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc\": container with ID starting with 985387de537ab2986794e1176812a53c904ba005af2badfa45028bac4b23babc not found: ID does not exist" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.021335 4919 scope.go:117] "RemoveContainer" containerID="dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486" Sep 30 20:33:58 crc kubenswrapper[4919]: E0930 20:33:58.021622 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486\": container with ID starting with dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486 not found: ID does not exist" containerID="dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.021643 4919 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486"} err="failed to get container status \"dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486\": rpc error: code = NotFound desc = could not find container \"dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486\": container with ID starting with dffc2a227089185966ef2561c245c7194b3e5904646b00d2a371494c6c4c2486 not found: ID does not exist" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.021656 4919 scope.go:117] "RemoveContainer" containerID="3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1" Sep 30 20:33:58 crc kubenswrapper[4919]: E0930 20:33:58.021903 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1\": container with ID starting with 3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1 not found: ID does not exist" containerID="3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.021918 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1"} err="failed to get container status \"3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1\": rpc error: code = NotFound desc = could not find container \"3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1\": container with ID starting with 3ee891c1258d21b2354bc557810584f2fd4699b5c8906aed681787407d484ad1 not found: ID does not exist" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.021930 4919 scope.go:117] "RemoveContainer" containerID="8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e" Sep 30 20:33:58 crc kubenswrapper[4919]: E0930 20:33:58.022087 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e\": container with ID starting with 8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e not found: ID does not exist" containerID="8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.022101 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e"} err="failed to get container status \"8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e\": rpc error: code = NotFound desc = could not find container \"8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e\": container with ID starting with 8002a6a6f1afa65558d2c6037edc62404795f67300e320679dd50ede36ef7a6e not found: ID does not exist" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.024108 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-run-httpd\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.024171 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.024200 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.024299 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-log-httpd\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.024369 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.024410 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-scripts\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.024493 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dvmz\" (UniqueName: \"kubernetes.io/projected/3162c33a-7a1b-45ce-9e03-65573dde1865-kube-api-access-8dvmz\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.024518 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-config-data\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.126529 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.126583 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-scripts\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.126635 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dvmz\" (UniqueName: \"kubernetes.io/projected/3162c33a-7a1b-45ce-9e03-65573dde1865-kube-api-access-8dvmz\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 
20:33:58.126656 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-config-data\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.126687 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-run-httpd\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.126712 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.126731 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.126782 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-log-httpd\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.127257 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-log-httpd\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.127563 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-run-httpd\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.131370 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.131996 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-scripts\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.132289 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-config-data\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.133720 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.139716 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.146734 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dvmz\" (UniqueName: \"kubernetes.io/projected/3162c33a-7a1b-45ce-9e03-65573dde1865-kube-api-access-8dvmz\") pod \"ceilometer-0\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: E0930 20:33:58.238372 4919 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eb320233a3ec927d907f535e56af8a6eb69b453cbb9fae1dde91666e030ab03f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:33:58 crc kubenswrapper[4919]: E0930 20:33:58.242601 4919 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eb320233a3ec927d907f535e56af8a6eb69b453cbb9fae1dde91666e030ab03f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:33:58 crc kubenswrapper[4919]: E0930 20:33:58.244388 4919 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="eb320233a3ec927d907f535e56af8a6eb69b453cbb9fae1dde91666e030ab03f" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 30 20:33:58 crc kubenswrapper[4919]: E0930 20:33:58.244432 4919 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="1943a483-a0f9-4cb9-a16d-7c7acd604f6e" containerName="nova-scheduler-scheduler" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.301281 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.768361 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:33:58 crc kubenswrapper[4919]: I0930 20:33:58.883937 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerStarted","Data":"47d6c6d01acf68b88558b34ec48240d3d0f7ff1f9d49e50ecdd4cdc843e98e58"} Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.656435 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b1898ef-3ec1-4e26-9332-a1402f89e08e" path="/var/lib/kubelet/pods/9b1898ef-3ec1-4e26-9332-a1402f89e08e/volumes" Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.908095 4919 generic.go:334] "Generic (PLEG): container finished" podID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerID="8c7ff0a363b28206ddd30ee3a19f012450886a5f4f51516ef599d1e45f754eb8" exitCode=0 Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.908292 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c36cbd0-636c-44e4-855a-9e407e31361e","Type":"ContainerDied","Data":"8c7ff0a363b28206ddd30ee3a19f012450886a5f4f51516ef599d1e45f754eb8"} Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.908354 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8c36cbd0-636c-44e4-855a-9e407e31361e","Type":"ContainerDied","Data":"dbf53af56ddcea2579a6f49d663ad94041b4e77eb791349a74018f5037003e51"} Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.908368 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbf53af56ddcea2579a6f49d663ad94041b4e77eb791349a74018f5037003e51" Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.910168 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerStarted","Data":"3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5"} Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.913831 4919 generic.go:334] "Generic (PLEG): container finished" podID="1943a483-a0f9-4cb9-a16d-7c7acd604f6e" containerID="eb320233a3ec927d907f535e56af8a6eb69b453cbb9fae1dde91666e030ab03f" exitCode=0 Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.913873 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1943a483-a0f9-4cb9-a16d-7c7acd604f6e","Type":"ContainerDied","Data":"eb320233a3ec927d907f535e56af8a6eb69b453cbb9fae1dde91666e030ab03f"} Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.918006 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.968652 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-combined-ca-bundle\") pod \"8c36cbd0-636c-44e4-855a-9e407e31361e\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.968740 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwvcj\" (UniqueName: \"kubernetes.io/projected/8c36cbd0-636c-44e4-855a-9e407e31361e-kube-api-access-zwvcj\") pod \"8c36cbd0-636c-44e4-855a-9e407e31361e\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.968812 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c36cbd0-636c-44e4-855a-9e407e31361e-logs\") pod \"8c36cbd0-636c-44e4-855a-9e407e31361e\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.968877 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-config-data\") pod \"8c36cbd0-636c-44e4-855a-9e407e31361e\" (UID: \"8c36cbd0-636c-44e4-855a-9e407e31361e\") " Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.970928 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c36cbd0-636c-44e4-855a-9e407e31361e-logs" (OuterVolumeSpecName: "logs") pod "8c36cbd0-636c-44e4-855a-9e407e31361e" (UID: "8c36cbd0-636c-44e4-855a-9e407e31361e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:33:59 crc kubenswrapper[4919]: I0930 20:33:59.979726 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c36cbd0-636c-44e4-855a-9e407e31361e-kube-api-access-zwvcj" (OuterVolumeSpecName: "kube-api-access-zwvcj") pod "8c36cbd0-636c-44e4-855a-9e407e31361e" (UID: "8c36cbd0-636c-44e4-855a-9e407e31361e"). InnerVolumeSpecName "kube-api-access-zwvcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.024676 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c36cbd0-636c-44e4-855a-9e407e31361e" (UID: "8c36cbd0-636c-44e4-855a-9e407e31361e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.024780 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-config-data" (OuterVolumeSpecName: "config-data") pod "8c36cbd0-636c-44e4-855a-9e407e31361e" (UID: "8c36cbd0-636c-44e4-855a-9e407e31361e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.056583 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.071582 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c36cbd0-636c-44e4-855a-9e407e31361e-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.074223 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.074246 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c36cbd0-636c-44e4-855a-9e407e31361e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.074262 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwvcj\" (UniqueName: \"kubernetes.io/projected/8c36cbd0-636c-44e4-855a-9e407e31361e-kube-api-access-zwvcj\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.175270 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scskv\" (UniqueName: \"kubernetes.io/projected/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-kube-api-access-scskv\") pod \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.175333 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-combined-ca-bundle\") pod \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.175370 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-config-data\") pod \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\" (UID: \"1943a483-a0f9-4cb9-a16d-7c7acd604f6e\") " Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.179746 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-kube-api-access-scskv" (OuterVolumeSpecName: "kube-api-access-scskv") pod "1943a483-a0f9-4cb9-a16d-7c7acd604f6e" (UID: "1943a483-a0f9-4cb9-a16d-7c7acd604f6e"). InnerVolumeSpecName "kube-api-access-scskv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.211179 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1943a483-a0f9-4cb9-a16d-7c7acd604f6e" (UID: "1943a483-a0f9-4cb9-a16d-7c7acd604f6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.219585 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-config-data" (OuterVolumeSpecName: "config-data") pod "1943a483-a0f9-4cb9-a16d-7c7acd604f6e" (UID: "1943a483-a0f9-4cb9-a16d-7c7acd604f6e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.241040 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.278629 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scskv\" (UniqueName: \"kubernetes.io/projected/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-kube-api-access-scskv\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.278665 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.278678 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1943a483-a0f9-4cb9-a16d-7c7acd604f6e-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.283227 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.514563 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.515495 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.925423 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerStarted","Data":"292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1"} Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.926926 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.935910 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.938120 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1943a483-a0f9-4cb9-a16d-7c7acd604f6e","Type":"ContainerDied","Data":"1506438a89cadb6479043bafc079d56497f4f5f5ce8f9eb177b9e9995fa0fdda"} Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.938195 4919 scope.go:117] "RemoveContainer" containerID="eb320233a3ec927d907f535e56af8a6eb69b453cbb9fae1dde91666e030ab03f" Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.984674 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:00 crc kubenswrapper[4919]: I0930 20:34:00.996776 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.014763 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:01 crc kubenswrapper[4919]: E0930 20:34:01.015304 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-api" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.015325 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-api" Sep 30 20:34:01 crc kubenswrapper[4919]: E0930 20:34:01.015350 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1943a483-a0f9-4cb9-a16d-7c7acd604f6e" containerName="nova-scheduler-scheduler" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.015359 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1943a483-a0f9-4cb9-a16d-7c7acd604f6e" containerName="nova-scheduler-scheduler" Sep 30 20:34:01 crc kubenswrapper[4919]: E0930 20:34:01.015405 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-log" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.015414 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-log" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.015667 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-log" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.015712 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" containerName="nova-api-api" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.015731 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="1943a483-a0f9-4cb9-a16d-7c7acd604f6e" containerName="nova-scheduler-scheduler" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.016898 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.032266 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.032324 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.033883 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.042048 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.056460 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.057830 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.062514 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.062760 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.095921 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.096308 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/928b80de-c061-4cde-a6e6-b6ac1e28134a-logs\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.096360 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-config-data\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.096495 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g72zn\" (UniqueName: \"kubernetes.io/projected/928b80de-c061-4cde-a6e6-b6ac1e28134a-kube-api-access-g72zn\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.197899 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.197971 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") " pod="openstack/nova-scheduler-0" Sep 30 20:34:01 
crc kubenswrapper[4919]: I0930 20:34:01.198021 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/928b80de-c061-4cde-a6e6-b6ac1e28134a-logs\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.198054 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2m2l\" (UniqueName: \"kubernetes.io/projected/05c57561-b8c9-4158-bdd8-d782214a7549-kube-api-access-d2m2l\") pod \"nova-scheduler-0\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") " pod="openstack/nova-scheduler-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.198091 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-config-data\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.198122 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-config-data\") pod \"nova-scheduler-0\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") " pod="openstack/nova-scheduler-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.198184 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g72zn\" (UniqueName: \"kubernetes.io/projected/928b80de-c061-4cde-a6e6-b6ac1e28134a-kube-api-access-g72zn\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.200121 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/928b80de-c061-4cde-a6e6-b6ac1e28134a-logs\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.203762 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.208621 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-config-data\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.216030 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g72zn\" (UniqueName: \"kubernetes.io/projected/928b80de-c061-4cde-a6e6-b6ac1e28134a-kube-api-access-g72zn\") pod \"nova-api-0\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " pod="openstack/nova-api-0" Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.300595 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") " pod="openstack/nova-scheduler-0" Sep 30 
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.300683 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-config-data\") pod \"nova-scheduler-0\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.310057 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-config-data\") pod \"nova-scheduler-0\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.310829 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.316989 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2m2l\" (UniqueName: \"kubernetes.io/projected/05c57561-b8c9-4158-bdd8-d782214a7549-kube-api-access-d2m2l\") pod \"nova-scheduler-0\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.359276 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.586879 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.652139 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1943a483-a0f9-4cb9-a16d-7c7acd604f6e" path="/var/lib/kubelet/pods/1943a483-a0f9-4cb9-a16d-7c7acd604f6e/volumes"
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.656158 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c36cbd0-636c-44e4-855a-9e407e31361e" path="/var/lib/kubelet/pods/8c36cbd0-636c-44e4-855a-9e407e31361e/volumes"
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.826949 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.941749 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"928b80de-c061-4cde-a6e6-b6ac1e28134a","Type":"ContainerStarted","Data":"60e544760a9035d00dd6778dda0545a6cc726e9f74410a5b9ff0e4beb3e09407"}
Sep 30 20:34:01 crc kubenswrapper[4919]: I0930 20:34:01.945418 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerStarted","Data":"4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311"}
Sep 30 20:34:02 crc kubenswrapper[4919]: W0930 20:34:02.061580 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05c57561_b8c9_4158_bdd8_d782214a7549.slice/crio-d8c0c93079f59e204e9c18d6fa013302df78d115269baf9ef8e3ae1d39c11b34 WatchSource:0}: Error finding container d8c0c93079f59e204e9c18d6fa013302df78d115269baf9ef8e3ae1d39c11b34: Status 404 returned error can't find the container with id d8c0c93079f59e204e9c18d6fa013302df78d115269baf9ef8e3ae1d39c11b34
Sep 30 20:34:02 crc kubenswrapper[4919]: I0930 20:34:02.064069 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:34:02 crc kubenswrapper[4919]: I0930 20:34:02.957340 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"05c57561-b8c9-4158-bdd8-d782214a7549","Type":"ContainerStarted","Data":"2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c"}
Sep 30 20:34:02 crc kubenswrapper[4919]: I0930 20:34:02.957867 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"05c57561-b8c9-4158-bdd8-d782214a7549","Type":"ContainerStarted","Data":"d8c0c93079f59e204e9c18d6fa013302df78d115269baf9ef8e3ae1d39c11b34"}
Sep 30 20:34:02 crc kubenswrapper[4919]: I0930 20:34:02.959901 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"928b80de-c061-4cde-a6e6-b6ac1e28134a","Type":"ContainerStarted","Data":"3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c"}
Sep 30 20:34:02 crc kubenswrapper[4919]: I0930 20:34:02.959942 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"928b80de-c061-4cde-a6e6-b6ac1e28134a","Type":"ContainerStarted","Data":"80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765"}
Sep 30 20:34:02 crc kubenswrapper[4919]: I0930 20:34:02.963301 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerStarted","Data":"930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe"}
Sep 30 20:34:02 crc kubenswrapper[4919]: I0930 20:34:02.963855 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 30 20:34:02 crc kubenswrapper[4919]: I0930 20:34:02.982041 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.982019845 podStartE2EDuration="2.982019845s" podCreationTimestamp="2025-09-30 20:34:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:02.97663456 +0000 UTC m=+1228.092667707" watchObservedRunningTime="2025-09-30 20:34:02.982019845 +0000 UTC m=+1228.098052982"
Sep 30 20:34:03 crc kubenswrapper[4919]: I0930 20:34:03.003459 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.003431182 podStartE2EDuration="3.003431182s" podCreationTimestamp="2025-09-30 20:34:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:02.992813846 +0000 UTC m=+1228.108847013" watchObservedRunningTime="2025-09-30 20:34:03.003431182 +0000 UTC m=+1228.119464339"
Sep 30 20:34:03 crc kubenswrapper[4919]: I0930 20:34:03.030999 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.385775576 podStartE2EDuration="6.030974256s" podCreationTimestamp="2025-09-30 20:33:57 +0000 UTC" firstStartedPulling="2025-09-30 20:33:58.779122168 +0000 UTC m=+1223.895155295" lastFinishedPulling="2025-09-30 20:34:02.424320818 +0000 UTC m=+1227.540353975" observedRunningTime="2025-09-30 20:34:03.016375615 +0000 UTC m=+1228.132408802" watchObservedRunningTime="2025-09-30 20:34:03.030974256 +0000 UTC m=+1228.147007393"
Sep 30 20:34:05 crc kubenswrapper[4919]: I0930 20:34:05.514597 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 20:34:05 crc kubenswrapper[4919]: I0930 20:34:05.517289 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 30 20:34:06 crc kubenswrapper[4919]: I0930 20:34:06.526418 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 20:34:06 crc kubenswrapper[4919]: I0930 20:34:06.526446 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 30 20:34:06 crc kubenswrapper[4919]: I0930 20:34:06.587881 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 30 20:34:11 crc kubenswrapper[4919]: I0930 20:34:11.360826 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 20:34:11 crc kubenswrapper[4919]: I0930 20:34:11.361179 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 30 20:34:11 crc kubenswrapper[4919]: I0930 20:34:11.587575 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 30 20:34:11 crc kubenswrapper[4919]: I0930 20:34:11.651684 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 30 20:34:12 crc kubenswrapper[4919]: I0930 20:34:12.117528 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 30 20:34:12 crc kubenswrapper[4919]: I0930 20:34:12.443446 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 20:34:12 crc kubenswrapper[4919]: I0930 20:34:12.443510 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Sep 30 20:34:15 crc kubenswrapper[4919]: I0930 20:34:15.527562 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 30 20:34:15 crc kubenswrapper[4919]: I0930 20:34:15.528378 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 30 20:34:15 crc kubenswrapper[4919]: I0930 20:34:15.539732 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 30 20:34:15 crc kubenswrapper[4919]: I0930 20:34:15.541809 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.104341 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.177948 4919 generic.go:334] "Generic (PLEG): container finished" podID="dbd6a165-4eb6-4758-831e-01f3ef8f0b30" containerID="f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37" exitCode=137
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.178303 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dbd6a165-4eb6-4758-831e-01f3ef8f0b30","Type":"ContainerDied","Data":"f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37"}
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.178432 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dbd6a165-4eb6-4758-831e-01f3ef8f0b30","Type":"ContainerDied","Data":"a3f238e9b564752622e442dd68c2cd06faa110ff4ddaf35d407ec0402072b092"}
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.178531 4919 scope.go:117] "RemoveContainer" containerID="f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.178740 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.204346 4919 scope.go:117] "RemoveContainer" containerID="f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37"
Sep 30 20:34:18 crc kubenswrapper[4919]: E0930 20:34:18.204860 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37\": container with ID starting with f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37 not found: ID does not exist" containerID="f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.205005 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37"} err="failed to get container status \"f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37\": rpc error: code = NotFound desc = could not find container \"f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37\": container with ID starting with f75beda5f5cd764c06a3b368041ecb5584f795d0a8ac86fe9a15c55f4de2dd37 not found: ID does not exist"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.216623 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-config-data\") pod \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") "
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.216852 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzkvz\" (UniqueName: \"kubernetes.io/projected/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-kube-api-access-xzkvz\") pod \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") "
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.216939 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-combined-ca-bundle\") pod \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\" (UID: \"dbd6a165-4eb6-4758-831e-01f3ef8f0b30\") "
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.229597 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-kube-api-access-xzkvz" (OuterVolumeSpecName: "kube-api-access-xzkvz") pod "dbd6a165-4eb6-4758-831e-01f3ef8f0b30" (UID: "dbd6a165-4eb6-4758-831e-01f3ef8f0b30"). InnerVolumeSpecName "kube-api-access-xzkvz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.245363 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dbd6a165-4eb6-4758-831e-01f3ef8f0b30" (UID: "dbd6a165-4eb6-4758-831e-01f3ef8f0b30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.257909 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-config-data" (OuterVolumeSpecName: "config-data") pod "dbd6a165-4eb6-4758-831e-01f3ef8f0b30" (UID: "dbd6a165-4eb6-4758-831e-01f3ef8f0b30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.319047 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzkvz\" (UniqueName: \"kubernetes.io/projected/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-kube-api-access-xzkvz\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.319085 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.319098 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbd6a165-4eb6-4758-831e-01f3ef8f0b30-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.542456 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.562098 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.572082 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:34:18 crc kubenswrapper[4919]: E0930 20:34:18.572905 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbd6a165-4eb6-4758-831e-01f3ef8f0b30" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.573021 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbd6a165-4eb6-4758-831e-01f3ef8f0b30" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.573409 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbd6a165-4eb6-4758-831e-01f3ef8f0b30" containerName="nova-cell1-novncproxy-novncproxy"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.574547 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.581409 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.620028 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.620232 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.620275 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.729115 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.729194 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmlnq\" (UniqueName: \"kubernetes.io/projected/ffac99cf-7663-4d4c-a617-2b15d249f07b-kube-api-access-zmlnq\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.729256 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.729333 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.729358 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.831773 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.831920 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmlnq\" (UniqueName: \"kubernetes.io/projected/ffac99cf-7663-4d4c-a617-2b15d249f07b-kube-api-access-zmlnq\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.831986 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.832130 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.832169 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.836856 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.838171 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.838457 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.838735 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffac99cf-7663-4d4c-a617-2b15d249f07b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.850834 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmlnq\" (UniqueName: \"kubernetes.io/projected/ffac99cf-7663-4d4c-a617-2b15d249f07b-kube-api-access-zmlnq\") pod \"nova-cell1-novncproxy-0\" (UID: \"ffac99cf-7663-4d4c-a617-2b15d249f07b\") " pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:18 crc kubenswrapper[4919]: I0930 20:34:18.948710 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Sep 30 20:34:19 crc kubenswrapper[4919]: I0930 20:34:19.407698 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Sep 30 20:34:19 crc kubenswrapper[4919]: W0930 20:34:19.413549 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffac99cf_7663_4d4c_a617_2b15d249f07b.slice/crio-11a641fcedee315c050c999e6e15d4b28d7125e503e3267bd0664ba60f2685c6 WatchSource:0}: Error finding container 11a641fcedee315c050c999e6e15d4b28d7125e503e3267bd0664ba60f2685c6: Status 404 returned error can't find the container with id 11a641fcedee315c050c999e6e15d4b28d7125e503e3267bd0664ba60f2685c6
Sep 30 20:34:19 crc kubenswrapper[4919]: I0930 20:34:19.651932 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbd6a165-4eb6-4758-831e-01f3ef8f0b30" path="/var/lib/kubelet/pods/dbd6a165-4eb6-4758-831e-01f3ef8f0b30/volumes"
Sep 30 20:34:20 crc kubenswrapper[4919]: I0930 20:34:20.212311 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ffac99cf-7663-4d4c-a617-2b15d249f07b","Type":"ContainerStarted","Data":"3f2c4f4af4db6fa52ad4295831670a5eaa0ed93075f246f78a24bc8dee6e4be1"}
Sep 30 20:34:20 crc kubenswrapper[4919]: I0930 20:34:20.212716 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"ffac99cf-7663-4d4c-a617-2b15d249f07b","Type":"ContainerStarted","Data":"11a641fcedee315c050c999e6e15d4b28d7125e503e3267bd0664ba60f2685c6"}
Sep 30 20:34:20 crc kubenswrapper[4919]: I0930 20:34:20.243422 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.2434039 podStartE2EDuration="2.2434039s" podCreationTimestamp="2025-09-30 20:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:20.235870822 +0000 UTC m=+1245.351904029" watchObservedRunningTime="2025-09-30 20:34:20.2434039 +0000 UTC m=+1245.359437027"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.365358 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.365804 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.368154 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.368231 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.372285 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.374099 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.574059 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9x4kk"]
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.578358 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.679641 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9x4kk"]
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.692724 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.692928 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-config\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.693023 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.693140 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.693277 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlkbc\" (UniqueName: \"kubernetes.io/projected/38a281fa-ebba-4dab-92eb-26a591f96dc4-kube-api-access-wlkbc\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.693420 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.795266 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.795531 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-config\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.795631 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.795812 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.795949 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlkbc\" (UniqueName: \"kubernetes.io/projected/38a281fa-ebba-4dab-92eb-26a591f96dc4-kube-api-access-wlkbc\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.796060 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.797587 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-swift-storage-0\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.798643 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-config\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.799645 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-svc\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.801011 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-sb\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.802887 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-nb\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.818807 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlkbc\" (UniqueName: \"kubernetes.io/projected/38a281fa-ebba-4dab-92eb-26a591f96dc4-kube-api-access-wlkbc\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
\"kubernetes.io/projected/38a281fa-ebba-4dab-92eb-26a591f96dc4-kube-api-access-wlkbc\") pod \"dnsmasq-dns-59cf4bdb65-9x4kk\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" Sep 30 20:34:21 crc kubenswrapper[4919]: I0930 20:34:21.919361 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" Sep 30 20:34:22 crc kubenswrapper[4919]: I0930 20:34:22.405821 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9x4kk"] Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.242917 4919 generic.go:334] "Generic (PLEG): container finished" podID="38a281fa-ebba-4dab-92eb-26a591f96dc4" containerID="502ff1a9c34510a92f87b0b2a26e84c4ff4ae3dcbb2ac4512b3c745db43be140" exitCode=0 Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.242986 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" event={"ID":"38a281fa-ebba-4dab-92eb-26a591f96dc4","Type":"ContainerDied","Data":"502ff1a9c34510a92f87b0b2a26e84c4ff4ae3dcbb2ac4512b3c745db43be140"} Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.243457 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" event={"ID":"38a281fa-ebba-4dab-92eb-26a591f96dc4","Type":"ContainerStarted","Data":"1054315d2935fda7dedf5b2f476d270539c7a7b265749686f2a443133812391d"} Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.585551 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.586080 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="ceilometer-central-agent" containerID="cri-o://3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5" gracePeriod=30 Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.586505 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="proxy-httpd" containerID="cri-o://930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe" gracePeriod=30 Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.586585 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="ceilometer-notification-agent" containerID="cri-o://292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1" gracePeriod=30 Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.586816 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="sg-core" containerID="cri-o://4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311" gracePeriod=30 Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.594389 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.198:3000/\": read tcp 10.217.0.2:41434->10.217.0.198:3000: read: connection reset by peer" Sep 30 20:34:23 crc kubenswrapper[4919]: I0930 20:34:23.949543 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:34:24 
crc kubenswrapper[4919]: I0930 20:34:24.100602 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.254075 4919 generic.go:334] "Generic (PLEG): container finished" podID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerID="930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe" exitCode=0 Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.254355 4919 generic.go:334] "Generic (PLEG): container finished" podID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerID="4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311" exitCode=2 Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.254455 4919 generic.go:334] "Generic (PLEG): container finished" podID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerID="3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5" exitCode=0 Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.254168 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerDied","Data":"930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe"} Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.254642 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerDied","Data":"4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311"} Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.254726 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerDied","Data":"3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5"} Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.256001 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" event={"ID":"38a281fa-ebba-4dab-92eb-26a591f96dc4","Type":"ContainerStarted","Data":"b71bcd8bd2e9511a735ac627b3d781b42898ac5f98c3e179ec841e19fe76fa0a"} Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.256265 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-log" containerID="cri-o://80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765" gracePeriod=30 Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.256269 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.256316 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-api" containerID="cri-o://3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c" gracePeriod=30 Sep 30 20:34:24 crc kubenswrapper[4919]: I0930 20:34:24.291188 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" podStartSLOduration=3.291172555 podStartE2EDuration="3.291172555s" podCreationTimestamp="2025-09-30 20:34:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:24.286869751 +0000 UTC m=+1249.402902878" watchObservedRunningTime="2025-09-30 20:34:24.291172555 +0000 UTC m=+1249.407205682" Sep 30 20:34:25 crc 
kubenswrapper[4919]: I0930 20:34:25.269019 4919 generic.go:334] "Generic (PLEG): container finished" podID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerID="80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765" exitCode=143 Sep 30 20:34:25 crc kubenswrapper[4919]: I0930 20:34:25.270325 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"928b80de-c061-4cde-a6e6-b6ac1e28134a","Type":"ContainerDied","Data":"80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765"} Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.062812 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.062915 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.795799 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.910184 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-config-data\") pod \"3162c33a-7a1b-45ce-9e03-65573dde1865\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.910738 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-run-httpd\") pod \"3162c33a-7a1b-45ce-9e03-65573dde1865\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.910811 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dvmz\" (UniqueName: \"kubernetes.io/projected/3162c33a-7a1b-45ce-9e03-65573dde1865-kube-api-access-8dvmz\") pod \"3162c33a-7a1b-45ce-9e03-65573dde1865\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.910845 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-sg-core-conf-yaml\") pod \"3162c33a-7a1b-45ce-9e03-65573dde1865\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.910881 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-scripts\") pod \"3162c33a-7a1b-45ce-9e03-65573dde1865\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.910906 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-log-httpd\") pod \"3162c33a-7a1b-45ce-9e03-65573dde1865\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " Sep 30 
20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.910927 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-ceilometer-tls-certs\") pod \"3162c33a-7a1b-45ce-9e03-65573dde1865\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.910990 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-combined-ca-bundle\") pod \"3162c33a-7a1b-45ce-9e03-65573dde1865\" (UID: \"3162c33a-7a1b-45ce-9e03-65573dde1865\") " Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.911675 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3162c33a-7a1b-45ce-9e03-65573dde1865" (UID: "3162c33a-7a1b-45ce-9e03-65573dde1865"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.912496 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3162c33a-7a1b-45ce-9e03-65573dde1865" (UID: "3162c33a-7a1b-45ce-9e03-65573dde1865"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.913613 4919 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.913644 4919 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3162c33a-7a1b-45ce-9e03-65573dde1865-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.918388 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3162c33a-7a1b-45ce-9e03-65573dde1865-kube-api-access-8dvmz" (OuterVolumeSpecName: "kube-api-access-8dvmz") pod "3162c33a-7a1b-45ce-9e03-65573dde1865" (UID: "3162c33a-7a1b-45ce-9e03-65573dde1865"). InnerVolumeSpecName "kube-api-access-8dvmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.919434 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-scripts" (OuterVolumeSpecName: "scripts") pod "3162c33a-7a1b-45ce-9e03-65573dde1865" (UID: "3162c33a-7a1b-45ce-9e03-65573dde1865"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:26 crc kubenswrapper[4919]: I0930 20:34:26.954525 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3162c33a-7a1b-45ce-9e03-65573dde1865" (UID: "3162c33a-7a1b-45ce-9e03-65573dde1865"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.004001 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "3162c33a-7a1b-45ce-9e03-65573dde1865" (UID: "3162c33a-7a1b-45ce-9e03-65573dde1865"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.016042 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dvmz\" (UniqueName: \"kubernetes.io/projected/3162c33a-7a1b-45ce-9e03-65573dde1865-kube-api-access-8dvmz\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.016084 4919 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.016097 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.016111 4919 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.030502 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3162c33a-7a1b-45ce-9e03-65573dde1865" (UID: "3162c33a-7a1b-45ce-9e03-65573dde1865"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.064560 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-config-data" (OuterVolumeSpecName: "config-data") pod "3162c33a-7a1b-45ce-9e03-65573dde1865" (UID: "3162c33a-7a1b-45ce-9e03-65573dde1865"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.118086 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.118493 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3162c33a-7a1b-45ce-9e03-65573dde1865-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.301041 4919 generic.go:334] "Generic (PLEG): container finished" podID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerID="292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1" exitCode=0 Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.301103 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerDied","Data":"292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1"} Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.301133 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3162c33a-7a1b-45ce-9e03-65573dde1865","Type":"ContainerDied","Data":"47d6c6d01acf68b88558b34ec48240d3d0f7ff1f9d49e50ecdd4cdc843e98e58"} Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.301169 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.301173 4919 scope.go:117] "RemoveContainer" containerID="930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.336160 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.343126 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.353531 4919 scope.go:117] "RemoveContainer" containerID="4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.387717 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:34:27 crc kubenswrapper[4919]: E0930 20:34:27.388095 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="ceilometer-notification-agent" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.388114 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="ceilometer-notification-agent" Sep 30 20:34:27 crc kubenswrapper[4919]: E0930 20:34:27.388132 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="sg-core" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.388138 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="sg-core" Sep 30 20:34:27 crc kubenswrapper[4919]: E0930 20:34:27.388160 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="proxy-httpd" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.388168 4919 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="proxy-httpd" Sep 30 20:34:27 crc kubenswrapper[4919]: E0930 20:34:27.388183 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="ceilometer-central-agent" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.388190 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="ceilometer-central-agent" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.388382 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="ceilometer-central-agent" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.388399 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="sg-core" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.388417 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="ceilometer-notification-agent" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.388432 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" containerName="proxy-httpd" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.390253 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.394062 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.394080 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.394131 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.401272 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.411009 4919 scope.go:117] "RemoveContainer" containerID="292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.442301 4919 scope.go:117] "RemoveContainer" containerID="3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.464205 4919 scope.go:117] "RemoveContainer" containerID="930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe" Sep 30 20:34:27 crc kubenswrapper[4919]: E0930 20:34:27.464530 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe\": container with ID starting with 930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe not found: ID does not exist" containerID="930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.464570 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe"} err="failed to get container status \"930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe\": rpc error: code = NotFound desc = could not find container 
\"930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe\": container with ID starting with 930559625f86c80b13d2f0eef64733e56bb48f443f60245caaf531249b28affe not found: ID does not exist" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.464597 4919 scope.go:117] "RemoveContainer" containerID="4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311" Sep 30 20:34:27 crc kubenswrapper[4919]: E0930 20:34:27.465155 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311\": container with ID starting with 4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311 not found: ID does not exist" containerID="4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.465182 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311"} err="failed to get container status \"4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311\": rpc error: code = NotFound desc = could not find container \"4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311\": container with ID starting with 4832ebcd88111e25027340d33b9cdf34e99cfe2650917a7a69e68018b6762311 not found: ID does not exist" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.465199 4919 scope.go:117] "RemoveContainer" containerID="292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1" Sep 30 20:34:27 crc kubenswrapper[4919]: E0930 20:34:27.465395 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1\": container with ID starting with 292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1 not found: ID does not exist" containerID="292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.465414 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1"} err="failed to get container status \"292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1\": rpc error: code = NotFound desc = could not find container \"292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1\": container with ID starting with 292f859a2f8b6009b87ae3243ebd015c6c6bdf73118ee84793907602fae51ed1 not found: ID does not exist" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.465430 4919 scope.go:117] "RemoveContainer" containerID="3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5" Sep 30 20:34:27 crc kubenswrapper[4919]: E0930 20:34:27.465894 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5\": container with ID starting with 3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5 not found: ID does not exist" containerID="3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.465924 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5"} 
err="failed to get container status \"3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5\": rpc error: code = NotFound desc = could not find container \"3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5\": container with ID starting with 3d655d71eab4c4b841f6cc34addd7b7d8a43c161158242ffc5e93883b9b6c1e5 not found: ID does not exist" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.534364 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-scripts\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.534745 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lbf2\" (UniqueName: \"kubernetes.io/projected/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-kube-api-access-2lbf2\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.534796 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-config-data\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.534866 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-log-httpd\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.534907 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.534943 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.535002 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.535025 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-run-httpd\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.636819 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-run-httpd\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.636873 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-scripts\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.636902 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lbf2\" (UniqueName: \"kubernetes.io/projected/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-kube-api-access-2lbf2\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.636947 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-config-data\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.637011 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-log-httpd\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.637053 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.637087 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.637144 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.637600 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-run-httpd\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.637945 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-log-httpd\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.642694 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.642891 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-scripts\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.643142 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-config-data\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.643295 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.645619 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.651317 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3162c33a-7a1b-45ce-9e03-65573dde1865" path="/var/lib/kubelet/pods/3162c33a-7a1b-45ce-9e03-65573dde1865/volumes" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.659982 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lbf2\" (UniqueName: \"kubernetes.io/projected/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-kube-api-access-2lbf2\") pod \"ceilometer-0\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.735896 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.823672 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.945652 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g72zn\" (UniqueName: \"kubernetes.io/projected/928b80de-c061-4cde-a6e6-b6ac1e28134a-kube-api-access-g72zn\") pod \"928b80de-c061-4cde-a6e6-b6ac1e28134a\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.945746 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-combined-ca-bundle\") pod \"928b80de-c061-4cde-a6e6-b6ac1e28134a\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.945899 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/928b80de-c061-4cde-a6e6-b6ac1e28134a-logs\") pod \"928b80de-c061-4cde-a6e6-b6ac1e28134a\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.945979 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-config-data\") pod \"928b80de-c061-4cde-a6e6-b6ac1e28134a\" (UID: \"928b80de-c061-4cde-a6e6-b6ac1e28134a\") " Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.948612 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/928b80de-c061-4cde-a6e6-b6ac1e28134a-logs" (OuterVolumeSpecName: "logs") pod "928b80de-c061-4cde-a6e6-b6ac1e28134a" (UID: "928b80de-c061-4cde-a6e6-b6ac1e28134a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.956358 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/928b80de-c061-4cde-a6e6-b6ac1e28134a-kube-api-access-g72zn" (OuterVolumeSpecName: "kube-api-access-g72zn") pod "928b80de-c061-4cde-a6e6-b6ac1e28134a" (UID: "928b80de-c061-4cde-a6e6-b6ac1e28134a"). InnerVolumeSpecName "kube-api-access-g72zn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.983033 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-config-data" (OuterVolumeSpecName: "config-data") pod "928b80de-c061-4cde-a6e6-b6ac1e28134a" (UID: "928b80de-c061-4cde-a6e6-b6ac1e28134a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:27 crc kubenswrapper[4919]: I0930 20:34:27.988034 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "928b80de-c061-4cde-a6e6-b6ac1e28134a" (UID: "928b80de-c061-4cde-a6e6-b6ac1e28134a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.048043 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.048085 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g72zn\" (UniqueName: \"kubernetes.io/projected/928b80de-c061-4cde-a6e6-b6ac1e28134a-kube-api-access-g72zn\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.048099 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/928b80de-c061-4cde-a6e6-b6ac1e28134a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.048111 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/928b80de-c061-4cde-a6e6-b6ac1e28134a-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.281952 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.351442 4919 generic.go:334] "Generic (PLEG): container finished" podID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerID="3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c" exitCode=0 Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.351514 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"928b80de-c061-4cde-a6e6-b6ac1e28134a","Type":"ContainerDied","Data":"3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c"} Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.351541 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"928b80de-c061-4cde-a6e6-b6ac1e28134a","Type":"ContainerDied","Data":"60e544760a9035d00dd6778dda0545a6cc726e9f74410a5b9ff0e4beb3e09407"} Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.351555 4919 scope.go:117] "RemoveContainer" containerID="3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.351695 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.365548 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerStarted","Data":"98d18c26c7c00acd83de0f5d2d6b4bce15687ba495b89ad1009b97a162969643"} Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.410318 4919 scope.go:117] "RemoveContainer" containerID="80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.439758 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.446282 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.468665 4919 scope.go:117] "RemoveContainer" containerID="3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c" Sep 30 20:34:28 crc kubenswrapper[4919]: E0930 20:34:28.469975 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c\": container with ID starting with 3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c not found: ID does not exist" containerID="3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.470017 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c"} err="failed to get container status \"3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c\": rpc error: code = NotFound desc = could not find container \"3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c\": container with ID starting with 3980c155cb4ba90174bb1cad1f824e88008424e2845c4b8b78c47b7ee332dd6c not found: ID does not exist" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.470038 4919 scope.go:117] "RemoveContainer" containerID="80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765" Sep 30 20:34:28 crc kubenswrapper[4919]: E0930 20:34:28.473359 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765\": container with ID starting with 80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765 not found: ID does not exist" containerID="80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.473403 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765"} err="failed to get container status \"80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765\": rpc error: code = NotFound desc = could not find container \"80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765\": container with ID starting with 80530b3f2f3f676554d15ce6b6271529042dcc8512b0081912fc61879ffd2765 not found: ID does not exist" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.474158 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:28 crc kubenswrapper[4919]: E0930 20:34:28.474570 4919 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-log" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.474587 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-log" Sep 30 20:34:28 crc kubenswrapper[4919]: E0930 20:34:28.474633 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-api" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.474640 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-api" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.474845 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-log" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.474881 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" containerName="nova-api-api" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.476063 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.481421 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.481592 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.481756 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.496938 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.562649 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2987dd1c-64db-4eee-bac5-b3e4c577947d-logs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.562903 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.563024 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.563078 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-config-data\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.563099 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l68r5\" (UniqueName: 
\"kubernetes.io/projected/2987dd1c-64db-4eee-bac5-b3e4c577947d-kube-api-access-l68r5\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.563155 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-public-tls-certs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.665246 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2987dd1c-64db-4eee-bac5-b3e4c577947d-logs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.665435 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.665595 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.665715 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-config-data\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.665783 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l68r5\" (UniqueName: \"kubernetes.io/projected/2987dd1c-64db-4eee-bac5-b3e4c577947d-kube-api-access-l68r5\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.665805 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2987dd1c-64db-4eee-bac5-b3e4c577947d-logs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.666040 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-public-tls-certs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.671841 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-config-data\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.673881 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.673902 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.683279 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-public-tls-certs\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.688797 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l68r5\" (UniqueName: \"kubernetes.io/projected/2987dd1c-64db-4eee-bac5-b3e4c577947d-kube-api-access-l68r5\") pod \"nova-api-0\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") " pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.794835 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.949929 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:34:28 crc kubenswrapper[4919]: I0930 20:34:28.977588 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:34:29 crc kubenswrapper[4919]: W0930 20:34:29.299641 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2987dd1c_64db_4eee_bac5_b3e4c577947d.slice/crio-20bf64a368c496ef5788018d39a8f97a32d869a41e1adcda23dcfb102143302f WatchSource:0}: Error finding container 20bf64a368c496ef5788018d39a8f97a32d869a41e1adcda23dcfb102143302f: Status 404 returned error can't find the container with id 20bf64a368c496ef5788018d39a8f97a32d869a41e1adcda23dcfb102143302f Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.301268 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.395861 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2987dd1c-64db-4eee-bac5-b3e4c577947d","Type":"ContainerStarted","Data":"20bf64a368c496ef5788018d39a8f97a32d869a41e1adcda23dcfb102143302f"} Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.398512 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerStarted","Data":"cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1"} Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.418586 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.569627 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-chwgq"] Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.571083 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.577729 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.577878 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.581889 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-chwgq"]
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.650626 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="928b80de-c061-4cde-a6e6-b6ac1e28134a" path="/var/lib/kubelet/pods/928b80de-c061-4cde-a6e6-b6ac1e28134a/volumes"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.714202 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rd5f\" (UniqueName: \"kubernetes.io/projected/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-kube-api-access-6rd5f\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.714453 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-scripts\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.714661 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-config-data\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.714751 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.816848 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rd5f\" (UniqueName: \"kubernetes.io/projected/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-kube-api-access-6rd5f\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.817208 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-scripts\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.817325 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-config-data\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.817375 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.821801 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.825716 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-scripts\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.831456 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-config-data\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:29 crc kubenswrapper[4919]: I0930 20:34:29.837563 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rd5f\" (UniqueName: \"kubernetes.io/projected/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-kube-api-access-6rd5f\") pod \"nova-cell1-cell-mapping-chwgq\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") " pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:30 crc kubenswrapper[4919]: I0930 20:34:30.083855 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:30 crc kubenswrapper[4919]: I0930 20:34:30.407331 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerStarted","Data":"6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740"}
Sep 30 20:34:30 crc kubenswrapper[4919]: I0930 20:34:30.410287 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2987dd1c-64db-4eee-bac5-b3e4c577947d","Type":"ContainerStarted","Data":"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482"}
Sep 30 20:34:30 crc kubenswrapper[4919]: I0930 20:34:30.410307 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2987dd1c-64db-4eee-bac5-b3e4c577947d","Type":"ContainerStarted","Data":"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9"}
Sep 30 20:34:30 crc kubenswrapper[4919]: I0930 20:34:30.525930 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.525911394 podStartE2EDuration="2.525911394s" podCreationTimestamp="2025-09-30 20:34:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:30.439279137 +0000 UTC m=+1255.555312254" watchObservedRunningTime="2025-09-30 20:34:30.525911394 +0000 UTC m=+1255.641944521"
Sep 30 20:34:30 crc kubenswrapper[4919]: I0930 20:34:30.531401 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-chwgq"]
Sep 30 20:34:31 crc kubenswrapper[4919]: I0930 20:34:31.428867 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-chwgq" event={"ID":"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9","Type":"ContainerStarted","Data":"3c60a188c676999443de7167aedf505359dc147d85460d0aaf72e8e2261f60ed"}
Sep 30 20:34:31 crc kubenswrapper[4919]: I0930 20:34:31.429506 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-chwgq" event={"ID":"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9","Type":"ContainerStarted","Data":"88bdc0d784275eb2926db91c5d395e6207d20e6105c8526da31289c3a0e641fd"}
Sep 30 20:34:31 crc kubenswrapper[4919]: I0930 20:34:31.440571 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerStarted","Data":"25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f"}
Sep 30 20:34:31 crc kubenswrapper[4919]: I0930 20:34:31.446546 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-chwgq" podStartSLOduration=2.446532773 podStartE2EDuration="2.446532773s" podCreationTimestamp="2025-09-30 20:34:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:31.445182794 +0000 UTC m=+1256.561215921" watchObservedRunningTime="2025-09-30 20:34:31.446532773 +0000 UTC m=+1256.562565900"
Sep 30 20:34:31 crc kubenswrapper[4919]: I0930 20:34:31.920392 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk"
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.013069 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-4gvsn"]
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.013826 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" podUID="df423b76-458b-49a2-94e0-cf51312f09a6" containerName="dnsmasq-dns" containerID="cri-o://5450878e4e9cde6fc7f510d20eab70bd7846d82a764214f69466f5d6fa3aefb8" gracePeriod=10
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.448824 4919 generic.go:334] "Generic (PLEG): container finished" podID="df423b76-458b-49a2-94e0-cf51312f09a6" containerID="5450878e4e9cde6fc7f510d20eab70bd7846d82a764214f69466f5d6fa3aefb8" exitCode=0
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.449337 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" event={"ID":"df423b76-458b-49a2-94e0-cf51312f09a6","Type":"ContainerDied","Data":"5450878e4e9cde6fc7f510d20eab70bd7846d82a764214f69466f5d6fa3aefb8"}
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.449400 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn" event={"ID":"df423b76-458b-49a2-94e0-cf51312f09a6","Type":"ContainerDied","Data":"d6a55336300d68c11dfb32508f993aedfc573f2141147eb77605a4540131bf3d"}
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.449411 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6a55336300d68c11dfb32508f993aedfc573f2141147eb77605a4540131bf3d"
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.508564 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.681351 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-nb\") pod \"df423b76-458b-49a2-94e0-cf51312f09a6\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") "
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.681468 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-swift-storage-0\") pod \"df423b76-458b-49a2-94e0-cf51312f09a6\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") "
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.681513 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwqp8\" (UniqueName: \"kubernetes.io/projected/df423b76-458b-49a2-94e0-cf51312f09a6-kube-api-access-pwqp8\") pod \"df423b76-458b-49a2-94e0-cf51312f09a6\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") "
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.681587 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-config\") pod \"df423b76-458b-49a2-94e0-cf51312f09a6\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") "
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.681624 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-sb\") pod \"df423b76-458b-49a2-94e0-cf51312f09a6\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") "
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.681649 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-svc\") pod \"df423b76-458b-49a2-94e0-cf51312f09a6\" (UID: \"df423b76-458b-49a2-94e0-cf51312f09a6\") "
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.694538 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df423b76-458b-49a2-94e0-cf51312f09a6-kube-api-access-pwqp8" (OuterVolumeSpecName: "kube-api-access-pwqp8") pod "df423b76-458b-49a2-94e0-cf51312f09a6" (UID: "df423b76-458b-49a2-94e0-cf51312f09a6"). InnerVolumeSpecName "kube-api-access-pwqp8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.780454 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "df423b76-458b-49a2-94e0-cf51312f09a6" (UID: "df423b76-458b-49a2-94e0-cf51312f09a6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.784049 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.784083 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwqp8\" (UniqueName: \"kubernetes.io/projected/df423b76-458b-49a2-94e0-cf51312f09a6-kube-api-access-pwqp8\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.799165 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "df423b76-458b-49a2-94e0-cf51312f09a6" (UID: "df423b76-458b-49a2-94e0-cf51312f09a6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.805585 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "df423b76-458b-49a2-94e0-cf51312f09a6" (UID: "df423b76-458b-49a2-94e0-cf51312f09a6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.810672 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "df423b76-458b-49a2-94e0-cf51312f09a6" (UID: "df423b76-458b-49a2-94e0-cf51312f09a6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.824745 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-config" (OuterVolumeSpecName: "config") pod "df423b76-458b-49a2-94e0-cf51312f09a6" (UID: "df423b76-458b-49a2-94e0-cf51312f09a6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.891966 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.891997 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-config\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.892006 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:32 crc kubenswrapper[4919]: I0930 20:34:32.892014 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df423b76-458b-49a2-94e0-cf51312f09a6-dns-svc\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:33 crc kubenswrapper[4919]: I0930 20:34:33.460505 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-845d6d6f59-4gvsn"
Sep 30 20:34:33 crc kubenswrapper[4919]: I0930 20:34:33.466331 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerStarted","Data":"347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714"}
Sep 30 20:34:33 crc kubenswrapper[4919]: I0930 20:34:33.466598 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 30 20:34:33 crc kubenswrapper[4919]: I0930 20:34:33.505885 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.142690101 podStartE2EDuration="6.505866658s" podCreationTimestamp="2025-09-30 20:34:27 +0000 UTC" firstStartedPulling="2025-09-30 20:34:28.303974563 +0000 UTC m=+1253.420007690" lastFinishedPulling="2025-09-30 20:34:32.66715112 +0000 UTC m=+1257.783184247" observedRunningTime="2025-09-30 20:34:33.497743414 +0000 UTC m=+1258.613776551" watchObservedRunningTime="2025-09-30 20:34:33.505866658 +0000 UTC m=+1258.621899785"
Sep 30 20:34:33 crc kubenswrapper[4919]: I0930 20:34:33.529889 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-4gvsn"]
Sep 30 20:34:33 crc kubenswrapper[4919]: I0930 20:34:33.540762 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-845d6d6f59-4gvsn"]
Sep 30 20:34:33 crc kubenswrapper[4919]: I0930 20:34:33.644768 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df423b76-458b-49a2-94e0-cf51312f09a6" path="/var/lib/kubelet/pods/df423b76-458b-49a2-94e0-cf51312f09a6/volumes"
Sep 30 20:34:36 crc kubenswrapper[4919]: I0930 20:34:36.499378 4919 generic.go:334] "Generic (PLEG): container finished" podID="6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" containerID="3c60a188c676999443de7167aedf505359dc147d85460d0aaf72e8e2261f60ed" exitCode=0
Sep 30 20:34:36 crc kubenswrapper[4919]: I0930 20:34:36.499453 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-chwgq" event={"ID":"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9","Type":"ContainerDied","Data":"3c60a188c676999443de7167aedf505359dc147d85460d0aaf72e8e2261f60ed"}
Sep 30 20:34:37 crc kubenswrapper[4919]: I0930 20:34:37.108641 4919 scope.go:117] "RemoveContainer" containerID="ed50a419e60c404046cdc01377daee59c9729cc10b218e6329e7abd606f11b3c"
Sep 30 20:34:37 crc kubenswrapper[4919]: I0930 20:34:37.925300 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.017231 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rd5f\" (UniqueName: \"kubernetes.io/projected/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-kube-api-access-6rd5f\") pod \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") "
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.017344 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-scripts\") pod \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") "
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.017379 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-config-data\") pod \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") "
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.017483 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-combined-ca-bundle\") pod \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\" (UID: \"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9\") "
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.023178 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-kube-api-access-6rd5f" (OuterVolumeSpecName: "kube-api-access-6rd5f") pod "6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" (UID: "6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9"). InnerVolumeSpecName "kube-api-access-6rd5f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.028574 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-scripts" (OuterVolumeSpecName: "scripts") pod "6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" (UID: "6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.047476 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" (UID: "6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.062339 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-config-data" (OuterVolumeSpecName: "config-data") pod "6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" (UID: "6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.121144 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rd5f\" (UniqueName: \"kubernetes.io/projected/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-kube-api-access-6rd5f\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.121428 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-scripts\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.121439 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.121448 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.552797 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-chwgq" event={"ID":"6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9","Type":"ContainerDied","Data":"88bdc0d784275eb2926db91c5d395e6207d20e6105c8526da31289c3a0e641fd"}
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.552831 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88bdc0d784275eb2926db91c5d395e6207d20e6105c8526da31289c3a0e641fd"
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.553489 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-chwgq"
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.708035 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.708364 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerName="nova-api-log" containerID="cri-o://f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9" gracePeriod=30
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.708459 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerName="nova-api-api" containerID="cri-o://a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482" gracePeriod=30
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.725022 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.725287 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="05c57561-b8c9-4158-bdd8-d782214a7549" containerName="nova-scheduler-scheduler" containerID="cri-o://2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c" gracePeriod=30
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.748852 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.749129 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-log" containerID="cri-o://769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2" gracePeriod=30
Sep 30 20:34:38 crc kubenswrapper[4919]: I0930 20:34:38.749676 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-metadata" containerID="cri-o://585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628" gracePeriod=30
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.298351 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.450764 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2987dd1c-64db-4eee-bac5-b3e4c577947d-logs\") pod \"2987dd1c-64db-4eee-bac5-b3e4c577947d\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") "
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.450820 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-config-data\") pod \"2987dd1c-64db-4eee-bac5-b3e4c577947d\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") "
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.450846 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-public-tls-certs\") pod \"2987dd1c-64db-4eee-bac5-b3e4c577947d\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") "
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.450899 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-combined-ca-bundle\") pod \"2987dd1c-64db-4eee-bac5-b3e4c577947d\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") "
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.451002 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l68r5\" (UniqueName: \"kubernetes.io/projected/2987dd1c-64db-4eee-bac5-b3e4c577947d-kube-api-access-l68r5\") pod \"2987dd1c-64db-4eee-bac5-b3e4c577947d\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") "
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.451112 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2987dd1c-64db-4eee-bac5-b3e4c577947d-logs" (OuterVolumeSpecName: "logs") pod "2987dd1c-64db-4eee-bac5-b3e4c577947d" (UID: "2987dd1c-64db-4eee-bac5-b3e4c577947d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.451208 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-internal-tls-certs\") pod \"2987dd1c-64db-4eee-bac5-b3e4c577947d\" (UID: \"2987dd1c-64db-4eee-bac5-b3e4c577947d\") "
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.451823 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2987dd1c-64db-4eee-bac5-b3e4c577947d-logs\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.454912 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2987dd1c-64db-4eee-bac5-b3e4c577947d-kube-api-access-l68r5" (OuterVolumeSpecName: "kube-api-access-l68r5") pod "2987dd1c-64db-4eee-bac5-b3e4c577947d" (UID: "2987dd1c-64db-4eee-bac5-b3e4c577947d"). InnerVolumeSpecName "kube-api-access-l68r5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.481942 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-config-data" (OuterVolumeSpecName: "config-data") pod "2987dd1c-64db-4eee-bac5-b3e4c577947d" (UID: "2987dd1c-64db-4eee-bac5-b3e4c577947d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.491467 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2987dd1c-64db-4eee-bac5-b3e4c577947d" (UID: "2987dd1c-64db-4eee-bac5-b3e4c577947d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.506244 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2987dd1c-64db-4eee-bac5-b3e4c577947d" (UID: "2987dd1c-64db-4eee-bac5-b3e4c577947d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.513431 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2987dd1c-64db-4eee-bac5-b3e4c577947d" (UID: "2987dd1c-64db-4eee-bac5-b3e4c577947d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.554080 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.554132 4919 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-public-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.554150 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.554162 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l68r5\" (UniqueName: \"kubernetes.io/projected/2987dd1c-64db-4eee-bac5-b3e4c577947d-kube-api-access-l68r5\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.554173 4919 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2987dd1c-64db-4eee-bac5-b3e4c577947d-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.566112 4919 generic.go:334] "Generic (PLEG): container finished" podID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerID="769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2" exitCode=143
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.566401 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c335a9c2-3185-4ade-8048-c2dc570a2961","Type":"ContainerDied","Data":"769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2"}
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.567941 4919 generic.go:334] "Generic (PLEG): container finished" podID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerID="a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482" exitCode=0
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.567976 4919 generic.go:334] "Generic (PLEG): container finished" podID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerID="f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9" exitCode=143
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.567983 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2987dd1c-64db-4eee-bac5-b3e4c577947d","Type":"ContainerDied","Data":"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482"}
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.568008 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.568027 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2987dd1c-64db-4eee-bac5-b3e4c577947d","Type":"ContainerDied","Data":"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9"}
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.568043 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2987dd1c-64db-4eee-bac5-b3e4c577947d","Type":"ContainerDied","Data":"20bf64a368c496ef5788018d39a8f97a32d869a41e1adcda23dcfb102143302f"}
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.568062 4919 scope.go:117] "RemoveContainer" containerID="a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.600484 4919 scope.go:117] "RemoveContainer" containerID="f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.608959 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.626901 4919 scope.go:117] "RemoveContainer" containerID="a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482"
Sep 30 20:34:39 crc kubenswrapper[4919]: E0930 20:34:39.627394 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482\": container with ID starting with a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482 not found: ID does not exist" containerID="a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.627432 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482"} err="failed to get container status \"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482\": rpc error: code = NotFound desc = could not find container \"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482\": container with ID starting with a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482 not found: ID does not exist"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.627460 4919 scope.go:117] "RemoveContainer" containerID="f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9"
Sep 30 20:34:39 crc kubenswrapper[4919]: E0930 20:34:39.627800 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9\": container with ID starting with f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9 not found: ID does not exist" containerID="f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.627821 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9"} err="failed to get container status \"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9\": rpc error: code = NotFound desc = could not find container \"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9\": container with ID starting with f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9 not found: ID does not exist"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.627837 4919 scope.go:117] "RemoveContainer" containerID="a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.628276 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482"} err="failed to get container status \"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482\": rpc error: code = NotFound desc = could not find container \"a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482\": container with ID starting with a6b9ea7c9d51efd59f93347328172cf8143753908738f7027c76a01999126482 not found: ID does not exist"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.628316 4919 scope.go:117] "RemoveContainer" containerID="f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.628389 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.628673 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9"} err="failed to get container status \"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9\": rpc error: code = NotFound desc = could not find container \"f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9\": container with ID starting with f674393d10c1d3f9f031552b6071f60935ae39a8eb26d3c2f6bc8458bc4852c9 not found: ID does not exist"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.658542 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" path="/var/lib/kubelet/pods/2987dd1c-64db-4eee-bac5-b3e4c577947d/volumes"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661111 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 30 20:34:39 crc kubenswrapper[4919]: E0930 20:34:39.661531 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df423b76-458b-49a2-94e0-cf51312f09a6" containerName="dnsmasq-dns"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661555 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="df423b76-458b-49a2-94e0-cf51312f09a6" containerName="dnsmasq-dns"
Sep 30 20:34:39 crc kubenswrapper[4919]: E0930 20:34:39.661572 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df423b76-458b-49a2-94e0-cf51312f09a6" containerName="init"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661580 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="df423b76-458b-49a2-94e0-cf51312f09a6" containerName="init"
Sep 30 20:34:39 crc kubenswrapper[4919]: E0930 20:34:39.661616 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerName="nova-api-api"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661627 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerName="nova-api-api"
Sep 30 20:34:39 crc kubenswrapper[4919]: E0930 20:34:39.661647 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" containerName="nova-manage"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661656 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" containerName="nova-manage"
Sep 30 20:34:39 crc kubenswrapper[4919]: E0930 20:34:39.661682 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerName="nova-api-log"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661689 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerName="nova-api-log"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661918 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" containerName="nova-manage"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661948 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerName="nova-api-log"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661959 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="2987dd1c-64db-4eee-bac5-b3e4c577947d" containerName="nova-api-api"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.661969 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="df423b76-458b-49a2-94e0-cf51312f09a6" containerName="dnsmasq-dns"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.663192 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.663332 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.665497 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.668942 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.669658 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.757308 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.757369 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.757643 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-public-tls-certs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.757810 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-config-data\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.757843 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0218baef-cb5d-45c2-8a23-bb06a2887c7b-logs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.757961 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2grk\" (UniqueName: \"kubernetes.io/projected/0218baef-cb5d-45c2-8a23-bb06a2887c7b-kube-api-access-h2grk\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.859050 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0218baef-cb5d-45c2-8a23-bb06a2887c7b-logs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.859088 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-config-data\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.859125 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2grk\" (UniqueName: \"kubernetes.io/projected/0218baef-cb5d-45c2-8a23-bb06a2887c7b-kube-api-access-h2grk\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.859159 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.859180 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.859266 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-public-tls-certs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.859645 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0218baef-cb5d-45c2-8a23-bb06a2887c7b-logs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.864821 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-public-tls-certs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.865177 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.865232 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.869905 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0218baef-cb5d-45c2-8a23-bb06a2887c7b-config-data\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.881162 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2grk\" (UniqueName: \"kubernetes.io/projected/0218baef-cb5d-45c2-8a23-bb06a2887c7b-kube-api-access-h2grk\") pod \"nova-api-0\" (UID: \"0218baef-cb5d-45c2-8a23-bb06a2887c7b\") " pod="openstack/nova-api-0"
Sep 30 20:34:39 crc kubenswrapper[4919]: I0930 20:34:39.982938 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.453641 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 30 20:34:40 crc kubenswrapper[4919]: W0930 20:34:40.454570 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0218baef_cb5d_45c2_8a23_bb06a2887c7b.slice/crio-f88f6543350ffc3626d5ef0c26f706e140cbef350ef1c39387260878e63e7726 WatchSource:0}: Error finding container f88f6543350ffc3626d5ef0c26f706e140cbef350ef1c39387260878e63e7726: Status 404 returned error can't find the container with id f88f6543350ffc3626d5ef0c26f706e140cbef350ef1c39387260878e63e7726
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.530918 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.603619 4919 generic.go:334] "Generic (PLEG): container finished" podID="05c57561-b8c9-4158-bdd8-d782214a7549" containerID="2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c" exitCode=0
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.603796 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.604122 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"05c57561-b8c9-4158-bdd8-d782214a7549","Type":"ContainerDied","Data":"2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c"}
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.604173 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"05c57561-b8c9-4158-bdd8-d782214a7549","Type":"ContainerDied","Data":"d8c0c93079f59e204e9c18d6fa013302df78d115269baf9ef8e3ae1d39c11b34"}
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.604190 4919 scope.go:117] "RemoveContainer" containerID="2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c"
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.605992 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0218baef-cb5d-45c2-8a23-bb06a2887c7b","Type":"ContainerStarted","Data":"f88f6543350ffc3626d5ef0c26f706e140cbef350ef1c39387260878e63e7726"}
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.646791 4919 scope.go:117] "RemoveContainer" containerID="2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c"
Sep 30 20:34:40 crc kubenswrapper[4919]: E0930 20:34:40.647327 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c\": container with ID starting with 2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c not found: ID does not exist" containerID="2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c"
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.647366 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c"} err="failed to get container status \"2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c\": rpc error: code = NotFound desc = could not find container \"2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c\": container with ID starting with 2f6d3ca15cc3438883ea44ef01a447d2af1763042ddbb0c2c3fddde395905f0c not found: ID does not exist"
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.707412 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2m2l\" (UniqueName: \"kubernetes.io/projected/05c57561-b8c9-4158-bdd8-d782214a7549-kube-api-access-d2m2l\") pod \"05c57561-b8c9-4158-bdd8-d782214a7549\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") "
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.708031 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-config-data\") pod \"05c57561-b8c9-4158-bdd8-d782214a7549\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") "
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.708108 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-combined-ca-bundle\") pod \"05c57561-b8c9-4158-bdd8-d782214a7549\" (UID: \"05c57561-b8c9-4158-bdd8-d782214a7549\") "
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.710460 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05c57561-b8c9-4158-bdd8-d782214a7549-kube-api-access-d2m2l" (OuterVolumeSpecName: "kube-api-access-d2m2l") pod "05c57561-b8c9-4158-bdd8-d782214a7549" (UID: "05c57561-b8c9-4158-bdd8-d782214a7549"). InnerVolumeSpecName "kube-api-access-d2m2l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.734730 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05c57561-b8c9-4158-bdd8-d782214a7549" (UID: "05c57561-b8c9-4158-bdd8-d782214a7549"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.735715 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-config-data" (OuterVolumeSpecName: "config-data") pod "05c57561-b8c9-4158-bdd8-d782214a7549" (UID: "05c57561-b8c9-4158-bdd8-d782214a7549"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.811949 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.811986 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2m2l\" (UniqueName: \"kubernetes.io/projected/05c57561-b8c9-4158-bdd8-d782214a7549-kube-api-access-d2m2l\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.812001 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05c57561-b8c9-4158-bdd8-d782214a7549-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.951614 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:34:40 crc kubenswrapper[4919]: I0930 20:34:40.959169 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:40.987532 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:34:41 crc kubenswrapper[4919]: E0930 20:34:40.988328 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05c57561-b8c9-4158-bdd8-d782214a7549" containerName="nova-scheduler-scheduler"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:40.988377 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="05c57561-b8c9-4158-bdd8-d782214a7549" containerName="nova-scheduler-scheduler"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:40.988833 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="05c57561-b8c9-4158-bdd8-d782214a7549" containerName="nova-scheduler-scheduler"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:40.990067 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:40.990199 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.021207 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.028529 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a6ed554-1c0c-4d0e-9506-103517b7b065-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.028709 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2v5qg\" (UniqueName: \"kubernetes.io/projected/4a6ed554-1c0c-4d0e-9506-103517b7b065-kube-api-access-2v5qg\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.028766 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a6ed554-1c0c-4d0e-9506-103517b7b065-config-data\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.133687 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2v5qg\" (UniqueName: \"kubernetes.io/projected/4a6ed554-1c0c-4d0e-9506-103517b7b065-kube-api-access-2v5qg\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.134199 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a6ed554-1c0c-4d0e-9506-103517b7b065-config-data\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.134488 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a6ed554-1c0c-4d0e-9506-103517b7b065-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.140325 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a6ed554-1c0c-4d0e-9506-103517b7b065-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.140378 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a6ed554-1c0c-4d0e-9506-103517b7b065-config-data\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.153563 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2v5qg\" (UniqueName: \"kubernetes.io/projected/4a6ed554-1c0c-4d0e-9506-103517b7b065-kube-api-access-2v5qg\") pod \"nova-scheduler-0\" (UID: \"4a6ed554-1c0c-4d0e-9506-103517b7b065\") " pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.345676 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.626611 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0218baef-cb5d-45c2-8a23-bb06a2887c7b","Type":"ContainerStarted","Data":"d5167cdd841000a4f267ea800f80a98c5614cbda2864cc9bad4ade9dcf43fec6"}
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.626907 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0218baef-cb5d-45c2-8a23-bb06a2887c7b","Type":"ContainerStarted","Data":"8bf5b9b547c19f35cf36397f492400df11c44ce061946af98796297668219582"}
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.651152 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05c57561-b8c9-4158-bdd8-d782214a7549" path="/var/lib/kubelet/pods/05c57561-b8c9-4158-bdd8-d782214a7549/volumes"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.655494 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.655472587 podStartE2EDuration="2.655472587s" podCreationTimestamp="2025-09-30 20:34:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:41.642937115 +0000 UTC m=+1266.758970252" watchObservedRunningTime="2025-09-30 20:34:41.655472587 +0000 UTC m=+1266.771505714"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.782072 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 30 20:34:41 crc kubenswrapper[4919]: W0930 20:34:41.790294 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a6ed554_1c0c_4d0e_9506_103517b7b065.slice/crio-efbe0c86dfc94abd8cd509f585f3ba6ea0cfe376a7a7be909278dce68148558f WatchSource:0}: Error finding container efbe0c86dfc94abd8cd509f585f3ba6ea0cfe376a7a7be909278dce68148558f: Status 404 returned error can't find the container with id efbe0c86dfc94abd8cd509f585f3ba6ea0cfe376a7a7be909278dce68148558f
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.888097 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": read tcp 10.217.0.2:52698->10.217.0.197:8775: read: connection reset by peer"
Sep 30 20:34:41 crc kubenswrapper[4919]: I0930 20:34:41.888130 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": read tcp 10.217.0.2:52708->10.217.0.197:8775: read: connection reset by peer"
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.347370 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.352777 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-combined-ca-bundle\") pod \"c335a9c2-3185-4ade-8048-c2dc570a2961\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") "
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.352854 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-config-data\") pod \"c335a9c2-3185-4ade-8048-c2dc570a2961\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") "
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.353014 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g44f\" (UniqueName: \"kubernetes.io/projected/c335a9c2-3185-4ade-8048-c2dc570a2961-kube-api-access-4g44f\") pod \"c335a9c2-3185-4ade-8048-c2dc570a2961\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") "
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.353059 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c335a9c2-3185-4ade-8048-c2dc570a2961-logs\") pod \"c335a9c2-3185-4ade-8048-c2dc570a2961\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") "
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.353118 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-nova-metadata-tls-certs\") pod \"c335a9c2-3185-4ade-8048-c2dc570a2961\" (UID: \"c335a9c2-3185-4ade-8048-c2dc570a2961\") "
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.353939 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c335a9c2-3185-4ade-8048-c2dc570a2961-logs" (OuterVolumeSpecName: "logs") pod "c335a9c2-3185-4ade-8048-c2dc570a2961" (UID: "c335a9c2-3185-4ade-8048-c2dc570a2961"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.359939 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c335a9c2-3185-4ade-8048-c2dc570a2961-kube-api-access-4g44f" (OuterVolumeSpecName: "kube-api-access-4g44f") pod "c335a9c2-3185-4ade-8048-c2dc570a2961" (UID: "c335a9c2-3185-4ade-8048-c2dc570a2961"). InnerVolumeSpecName "kube-api-access-4g44f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.401952 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c335a9c2-3185-4ade-8048-c2dc570a2961" (UID: "c335a9c2-3185-4ade-8048-c2dc570a2961"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.402875 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-config-data" (OuterVolumeSpecName: "config-data") pod "c335a9c2-3185-4ade-8048-c2dc570a2961" (UID: "c335a9c2-3185-4ade-8048-c2dc570a2961"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.444365 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c335a9c2-3185-4ade-8048-c2dc570a2961" (UID: "c335a9c2-3185-4ade-8048-c2dc570a2961"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.455403 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.455446 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.455458 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g44f\" (UniqueName: \"kubernetes.io/projected/c335a9c2-3185-4ade-8048-c2dc570a2961-kube-api-access-4g44f\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.455471 4919 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c335a9c2-3185-4ade-8048-c2dc570a2961-logs\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.455484 4919 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c335a9c2-3185-4ade-8048-c2dc570a2961-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.642928 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4a6ed554-1c0c-4d0e-9506-103517b7b065","Type":"ContainerStarted","Data":"e238dcbaa5b2dfcb154cab4c3b691f799b0019cd94cbcbf2ba4d602db0e7c640"} Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.642972 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4a6ed554-1c0c-4d0e-9506-103517b7b065","Type":"ContainerStarted","Data":"efbe0c86dfc94abd8cd509f585f3ba6ea0cfe376a7a7be909278dce68148558f"} Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.653789 4919 generic.go:334] "Generic (PLEG): container finished" podID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerID="585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628" exitCode=0 Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.654109 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.654114 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c335a9c2-3185-4ade-8048-c2dc570a2961","Type":"ContainerDied","Data":"585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628"} Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.654148 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"c335a9c2-3185-4ade-8048-c2dc570a2961","Type":"ContainerDied","Data":"472ff519998969de9f8921f801d551d5b645511d9812fad24b0ecaec0b09a9a2"} Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.654171 4919 scope.go:117] "RemoveContainer" containerID="585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.664652 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.664607977 podStartE2EDuration="2.664607977s" podCreationTimestamp="2025-09-30 20:34:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:42.656975367 +0000 UTC m=+1267.773008514" watchObservedRunningTime="2025-09-30 20:34:42.664607977 +0000 UTC m=+1267.780641114" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.684444 4919 scope.go:117] "RemoveContainer" containerID="769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.711278 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.717259 4919 scope.go:117] "RemoveContainer" containerID="585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628" Sep 30 20:34:42 crc kubenswrapper[4919]: E0930 20:34:42.717743 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628\": container with ID starting with 585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628 not found: ID does not exist" containerID="585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.717775 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628"} err="failed to get container status \"585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628\": rpc error: code = NotFound desc = could not find container \"585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628\": container with ID starting with 585a053ee58ffa34f76b1eacf325d481b6ede231c409a66605f1c6fedd86d628 not found: ID does not exist" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.717797 4919 scope.go:117] "RemoveContainer" containerID="769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2" Sep 30 20:34:42 crc kubenswrapper[4919]: E0930 20:34:42.719307 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2\": container with ID starting with 769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2 not found: ID does not exist" 
containerID="769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.719349 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2"} err="failed to get container status \"769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2\": rpc error: code = NotFound desc = could not find container \"769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2\": container with ID starting with 769f3bc26aa53539a24cc5ce0e43a1da5fef0563bbd6e2b73e0cd89a9546dfe2 not found: ID does not exist" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.730137 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.738336 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:34:42 crc kubenswrapper[4919]: E0930 20:34:42.738986 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-metadata" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.739092 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-metadata" Sep 30 20:34:42 crc kubenswrapper[4919]: E0930 20:34:42.739186 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-log" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.739262 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-log" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.739587 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-metadata" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.739673 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" containerName="nova-metadata-log" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.741013 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.744286 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.744452 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.756754 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.763320 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-config-data\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.763554 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npsmt\" (UniqueName: \"kubernetes.io/projected/5f40f126-0a68-488f-ae68-c56dc4581bd1-kube-api-access-npsmt\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.763718 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.763830 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f40f126-0a68-488f-ae68-c56dc4581bd1-logs\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.763932 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.864620 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npsmt\" (UniqueName: \"kubernetes.io/projected/5f40f126-0a68-488f-ae68-c56dc4581bd1-kube-api-access-npsmt\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.864705 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.864942 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f40f126-0a68-488f-ae68-c56dc4581bd1-logs\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " 
pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.865001 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.865060 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-config-data\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.866065 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f40f126-0a68-488f-ae68-c56dc4581bd1-logs\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.871060 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.871340 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-config-data\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.872159 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f40f126-0a68-488f-ae68-c56dc4581bd1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:42 crc kubenswrapper[4919]: I0930 20:34:42.882724 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npsmt\" (UniqueName: \"kubernetes.io/projected/5f40f126-0a68-488f-ae68-c56dc4581bd1-kube-api-access-npsmt\") pod \"nova-metadata-0\" (UID: \"5f40f126-0a68-488f-ae68-c56dc4581bd1\") " pod="openstack/nova-metadata-0" Sep 30 20:34:43 crc kubenswrapper[4919]: I0930 20:34:43.069971 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 30 20:34:44 crc kubenswrapper[4919]: I0930 20:34:43.538207 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 30 20:34:44 crc kubenswrapper[4919]: I0930 20:34:43.660628 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c335a9c2-3185-4ade-8048-c2dc570a2961" path="/var/lib/kubelet/pods/c335a9c2-3185-4ade-8048-c2dc570a2961/volumes" Sep 30 20:34:44 crc kubenswrapper[4919]: I0930 20:34:43.680464 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f40f126-0a68-488f-ae68-c56dc4581bd1","Type":"ContainerStarted","Data":"27fd67134917dc57dc6235e37ac8820b5341d6c4bcea6638bba6651830ab4f56"} Sep 30 20:34:44 crc kubenswrapper[4919]: I0930 20:34:44.700986 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f40f126-0a68-488f-ae68-c56dc4581bd1","Type":"ContainerStarted","Data":"ea7239c9766a9d33e5e067ffff6057daaaf149b7e46352fedbb2118adaac3dfe"} Sep 30 20:34:44 crc kubenswrapper[4919]: I0930 20:34:44.701541 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5f40f126-0a68-488f-ae68-c56dc4581bd1","Type":"ContainerStarted","Data":"83e9375d8f2c6e8c753c6331f289a608e8dc552400e42665e3d14eadca53d41e"} Sep 30 20:34:44 crc kubenswrapper[4919]: I0930 20:34:44.745808 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.74576896 podStartE2EDuration="2.74576896s" podCreationTimestamp="2025-09-30 20:34:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:34:44.739651944 +0000 UTC m=+1269.855685101" watchObservedRunningTime="2025-09-30 20:34:44.74576896 +0000 UTC m=+1269.861802118" Sep 30 20:34:46 crc kubenswrapper[4919]: I0930 20:34:46.346423 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 30 20:34:48 crc kubenswrapper[4919]: I0930 20:34:48.070700 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:34:48 crc kubenswrapper[4919]: I0930 20:34:48.071009 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 30 20:34:49 crc kubenswrapper[4919]: I0930 20:34:49.983537 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:34:49 crc kubenswrapper[4919]: I0930 20:34:49.984030 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 30 20:34:50 crc kubenswrapper[4919]: I0930 20:34:50.997495 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0218baef-cb5d-45c2-8a23-bb06a2887c7b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:34:50 crc kubenswrapper[4919]: I0930 20:34:50.997508 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0218baef-cb5d-45c2-8a23-bb06a2887c7b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:34:51 crc kubenswrapper[4919]: I0930 20:34:51.346552 4919 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 30 20:34:51 crc kubenswrapper[4919]: I0930 20:34:51.384477 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 30 20:34:51 crc kubenswrapper[4919]: I0930 20:34:51.884884 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 30 20:34:53 crc kubenswrapper[4919]: I0930 20:34:53.070897 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 20:34:53 crc kubenswrapper[4919]: I0930 20:34:53.070960 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 30 20:34:54 crc kubenswrapper[4919]: I0930 20:34:54.088502 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5f40f126-0a68-488f-ae68-c56dc4581bd1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.208:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:34:54 crc kubenswrapper[4919]: I0930 20:34:54.088523 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5f40f126-0a68-488f-ae68-c56dc4581bd1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.208:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 30 20:34:56 crc kubenswrapper[4919]: I0930 20:34:56.062180 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:34:56 crc kubenswrapper[4919]: I0930 20:34:56.062669 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:34:57 crc kubenswrapper[4919]: I0930 20:34:57.752797 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 20:34:59 crc kubenswrapper[4919]: I0930 20:34:59.990680 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 20:35:00 crc kubenswrapper[4919]: I0930 20:34:59.997469 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 30 20:35:00 crc kubenswrapper[4919]: I0930 20:34:59.998003 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 20:35:00 crc kubenswrapper[4919]: I0930 20:34:59.998037 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 30 20:35:00 crc kubenswrapper[4919]: I0930 20:35:00.006620 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 20:35:00 crc kubenswrapper[4919]: I0930 20:35:00.010139 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 30 20:35:03 crc kubenswrapper[4919]: I0930 20:35:03.088297 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/nova-metadata-0" Sep 30 20:35:03 crc kubenswrapper[4919]: I0930 20:35:03.089120 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 30 20:35:03 crc kubenswrapper[4919]: I0930 20:35:03.101633 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 20:35:03 crc kubenswrapper[4919]: I0930 20:35:03.103396 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 30 20:35:14 crc kubenswrapper[4919]: I0930 20:35:14.506518 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:35:14 crc kubenswrapper[4919]: I0930 20:35:14.507406 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="ceilometer-central-agent" containerID="cri-o://cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1" gracePeriod=30 Sep 30 20:35:14 crc kubenswrapper[4919]: I0930 20:35:14.507585 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="ceilometer-notification-agent" containerID="cri-o://6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740" gracePeriod=30 Sep 30 20:35:14 crc kubenswrapper[4919]: I0930 20:35:14.507606 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="sg-core" containerID="cri-o://25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f" gracePeriod=30 Sep 30 20:35:14 crc kubenswrapper[4919]: I0930 20:35:14.507652 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="proxy-httpd" containerID="cri-o://347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714" gracePeriod=30 Sep 30 20:35:15 crc kubenswrapper[4919]: I0930 20:35:15.141390 4919 generic.go:334] "Generic (PLEG): container finished" podID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerID="347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714" exitCode=0 Sep 30 20:35:15 crc kubenswrapper[4919]: I0930 20:35:15.141693 4919 generic.go:334] "Generic (PLEG): container finished" podID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerID="25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f" exitCode=2 Sep 30 20:35:15 crc kubenswrapper[4919]: I0930 20:35:15.141702 4919 generic.go:334] "Generic (PLEG): container finished" podID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerID="cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1" exitCode=0 Sep 30 20:35:15 crc kubenswrapper[4919]: I0930 20:35:15.141438 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerDied","Data":"347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714"} Sep 30 20:35:15 crc kubenswrapper[4919]: I0930 20:35:15.141737 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerDied","Data":"25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f"} Sep 30 20:35:15 crc kubenswrapper[4919]: I0930 20:35:15.141751 4919 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerDied","Data":"cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1"} Sep 30 20:35:15 crc kubenswrapper[4919]: I0930 20:35:15.390193 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:35:16 crc kubenswrapper[4919]: I0930 20:35:16.208010 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.779593 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.886732 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-combined-ca-bundle\") pod \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.887017 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-sg-core-conf-yaml\") pod \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.893356 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-log-httpd\") pod \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.893468 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lbf2\" (UniqueName: \"kubernetes.io/projected/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-kube-api-access-2lbf2\") pod \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.893493 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-run-httpd\") pod \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.893541 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-config-data\") pod \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.893630 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-scripts\") pod \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.893666 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-ceilometer-tls-certs\") pod \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\" (UID: \"972acd61-5033-4cc0-96a0-65a1e5a4c9f6\") " Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.894559 
4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "972acd61-5033-4cc0-96a0-65a1e5a4c9f6" (UID: "972acd61-5033-4cc0-96a0-65a1e5a4c9f6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.894579 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "972acd61-5033-4cc0-96a0-65a1e5a4c9f6" (UID: "972acd61-5033-4cc0-96a0-65a1e5a4c9f6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.898294 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-kube-api-access-2lbf2" (OuterVolumeSpecName: "kube-api-access-2lbf2") pod "972acd61-5033-4cc0-96a0-65a1e5a4c9f6" (UID: "972acd61-5033-4cc0-96a0-65a1e5a4c9f6"). InnerVolumeSpecName "kube-api-access-2lbf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.899012 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lbf2\" (UniqueName: \"kubernetes.io/projected/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-kube-api-access-2lbf2\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.899034 4919 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.899044 4919 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.918974 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-scripts" (OuterVolumeSpecName: "scripts") pod "972acd61-5033-4cc0-96a0-65a1e5a4c9f6" (UID: "972acd61-5033-4cc0-96a0-65a1e5a4c9f6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:35:17 crc kubenswrapper[4919]: I0930 20:35:17.995557 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "972acd61-5033-4cc0-96a0-65a1e5a4c9f6" (UID: "972acd61-5033-4cc0-96a0-65a1e5a4c9f6"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.000498 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.000531 4919 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.006378 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "972acd61-5033-4cc0-96a0-65a1e5a4c9f6" (UID: "972acd61-5033-4cc0-96a0-65a1e5a4c9f6"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.040866 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "972acd61-5033-4cc0-96a0-65a1e5a4c9f6" (UID: "972acd61-5033-4cc0-96a0-65a1e5a4c9f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.068575 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-config-data" (OuterVolumeSpecName: "config-data") pod "972acd61-5033-4cc0-96a0-65a1e5a4c9f6" (UID: "972acd61-5033-4cc0-96a0-65a1e5a4c9f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.101880 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.101921 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.101941 4919 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/972acd61-5033-4cc0-96a0-65a1e5a4c9f6-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.169279 4919 generic.go:334] "Generic (PLEG): container finished" podID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerID="6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740" exitCode=0 Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.169311 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.169323 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerDied","Data":"6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740"} Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.169485 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"972acd61-5033-4cc0-96a0-65a1e5a4c9f6","Type":"ContainerDied","Data":"98d18c26c7c00acd83de0f5d2d6b4bce15687ba495b89ad1009b97a162969643"} Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.169508 4919 scope.go:117] "RemoveContainer" containerID="347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.199587 4919 scope.go:117] "RemoveContainer" containerID="25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.222373 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.234566 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.246718 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:35:18 crc kubenswrapper[4919]: E0930 20:35:18.247170 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="sg-core" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.247188 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="sg-core" Sep 30 20:35:18 crc kubenswrapper[4919]: E0930 20:35:18.247301 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="ceilometer-notification-agent" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.247312 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="ceilometer-notification-agent" Sep 30 20:35:18 crc kubenswrapper[4919]: E0930 20:35:18.247326 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="proxy-httpd" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.247333 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="proxy-httpd" Sep 30 20:35:18 crc kubenswrapper[4919]: E0930 20:35:18.247348 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="ceilometer-central-agent" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.247354 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="ceilometer-central-agent" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.247519 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="proxy-httpd" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.247539 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="sg-core" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.247558 4919 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="ceilometer-notification-agent" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.247567 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" containerName="ceilometer-central-agent" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.249288 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.253540 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.253638 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.253746 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.255094 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.281816 4919 scope.go:117] "RemoveContainer" containerID="6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.307547 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.307615 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-run-httpd\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.307751 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-config-data\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.307936 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.308138 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f87zl\" (UniqueName: \"kubernetes.io/projected/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-kube-api-access-f87zl\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.308185 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-scripts\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " 
pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.308240 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-log-httpd\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.308271 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.317421 4919 scope.go:117] "RemoveContainer" containerID="cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.395418 4919 scope.go:117] "RemoveContainer" containerID="347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714" Sep 30 20:35:18 crc kubenswrapper[4919]: E0930 20:35:18.398158 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714\": container with ID starting with 347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714 not found: ID does not exist" containerID="347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.398189 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714"} err="failed to get container status \"347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714\": rpc error: code = NotFound desc = could not find container \"347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714\": container with ID starting with 347f87b0178f5b050ead0c0cc9502c6c3157295f3d61806c3517bc9437077714 not found: ID does not exist" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.398223 4919 scope.go:117] "RemoveContainer" containerID="25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f" Sep 30 20:35:18 crc kubenswrapper[4919]: E0930 20:35:18.398393 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f\": container with ID starting with 25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f not found: ID does not exist" containerID="25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.398426 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f"} err="failed to get container status \"25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f\": rpc error: code = NotFound desc = could not find container \"25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f\": container with ID starting with 25e63799986ad546e2ec933d1465870ee20200013604b974481f063fd03e3e2f not found: ID does not exist" Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.398438 4919 scope.go:117] "RemoveContainer" 
containerID="6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740"
Sep 30 20:35:18 crc kubenswrapper[4919]: E0930 20:35:18.398698 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740\": container with ID starting with 6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740 not found: ID does not exist" containerID="6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.398717 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740"} err="failed to get container status \"6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740\": rpc error: code = NotFound desc = could not find container \"6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740\": container with ID starting with 6690b66c77b18e2a275202d3f8268f34920b7a8cb09ba2cbb5dfe4b539d47740 not found: ID does not exist"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.398730 4919 scope.go:117] "RemoveContainer" containerID="cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1"
Sep 30 20:35:18 crc kubenswrapper[4919]: E0930 20:35:18.399192 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1\": container with ID starting with cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1 not found: ID does not exist" containerID="cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.399229 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1"} err="failed to get container status \"cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1\": rpc error: code = NotFound desc = could not find container \"cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1\": container with ID starting with cd9b64c391bc925d4a181465dd5a6374c6e84426ae6ee4cdcb52a54d96f11ba1 not found: ID does not exist"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410271 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410362 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f87zl\" (UniqueName: \"kubernetes.io/projected/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-kube-api-access-f87zl\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410436 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-scripts\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410484 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-log-httpd\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410505 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410542 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410576 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-run-httpd\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410607 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-config-data\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410907 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-log-httpd\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.410980 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-run-httpd\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.415849 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-scripts\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.417867 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.418026 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-config-data\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.425856 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.429954 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.430066 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f87zl\" (UniqueName: \"kubernetes.io/projected/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-kube-api-access-f87zl\") pod \"ceilometer-0\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " pod="openstack/ceilometer-0"
Sep 30 20:35:18 crc kubenswrapper[4919]: I0930 20:35:18.574596 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 20:35:19 crc kubenswrapper[4919]: I0930 20:35:19.086898 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 20:35:19 crc kubenswrapper[4919]: I0930 20:35:19.188911 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerStarted","Data":"b7986192215c33d6afcf280d01cd84a24e28cda858277fef2e0a198dca4b7787"}
Sep 30 20:35:19 crc kubenswrapper[4919]: I0930 20:35:19.642543 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="972acd61-5033-4cc0-96a0-65a1e5a4c9f6" path="/var/lib/kubelet/pods/972acd61-5033-4cc0-96a0-65a1e5a4c9f6/volumes"
Sep 30 20:35:19 crc kubenswrapper[4919]: I0930 20:35:19.825590 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" containerName="rabbitmq" containerID="cri-o://38a5d3175d1cc4769ad239682d75e814c0498c53fc9db30bea0240c22b357552" gracePeriod=604796
Sep 30 20:35:20 crc kubenswrapper[4919]: I0930 20:35:20.554933 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerName="rabbitmq" containerID="cri-o://60c286ef96249077e4f4a7963fc707dec52b96f550928c462035531baa60c398" gracePeriod=604796
Sep 30 20:35:22 crc kubenswrapper[4919]: I0930 20:35:22.408168 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused"
Sep 30 20:35:22 crc kubenswrapper[4919]: I0930 20:35:22.789022 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.109:5671: connect: connection refused"
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.061937 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.062545 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.062597 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6"
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.063511 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50b0f3b522dc60e1e18fadcde7bd6a100190635e5277992da10bb56412db1f04"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.063645 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://50b0f3b522dc60e1e18fadcde7bd6a100190635e5277992da10bb56412db1f04" gracePeriod=600
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.268240 4919 generic.go:334] "Generic (PLEG): container finished" podID="831f0cec-e526-41e4-851f-139ffef9bea5" containerID="38a5d3175d1cc4769ad239682d75e814c0498c53fc9db30bea0240c22b357552" exitCode=0
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.268313 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"831f0cec-e526-41e4-851f-139ffef9bea5","Type":"ContainerDied","Data":"38a5d3175d1cc4769ad239682d75e814c0498c53fc9db30bea0240c22b357552"}
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.271700 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="50b0f3b522dc60e1e18fadcde7bd6a100190635e5277992da10bb56412db1f04" exitCode=0
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.271736 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"50b0f3b522dc60e1e18fadcde7bd6a100190635e5277992da10bb56412db1f04"}
Sep 30 20:35:26 crc kubenswrapper[4919]: I0930 20:35:26.271762 4919 scope.go:117] "RemoveContainer" containerID="266a47211086852ebceb8347506c7f46056112506f6f3e1b6a4412456d9a3ed6"
Sep 30 20:35:27 crc kubenswrapper[4919]: I0930 20:35:27.284602 4919 generic.go:334] "Generic (PLEG): container finished" podID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerID="60c286ef96249077e4f4a7963fc707dec52b96f550928c462035531baa60c398" exitCode=0
Sep 30 20:35:27 crc kubenswrapper[4919]: I0930 20:35:27.284656 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"567de3cf-1a4f-426d-b4d5-da78ead6e923","Type":"ContainerDied","Data":"60c286ef96249077e4f4a7963fc707dec52b96f550928c462035531baa60c398"}
Sep 30 20:35:28 crc kubenswrapper[4919]: I0930 20:35:28.936959 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-77wv7"]
Sep 30 20:35:28 crc kubenswrapper[4919]: I0930 20:35:28.941088 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:28 crc kubenswrapper[4919]: I0930 20:35:28.946789 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Sep 30 20:35:28 crc kubenswrapper[4919]: I0930 20:35:28.970894 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-77wv7"]
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.019571 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.019634 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-config\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.019727 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.019762 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.019796 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58x9k\" (UniqueName: \"kubernetes.io/projected/c57c7756-8fcd-43a3-8fdb-026c084f8f33-kube-api-access-58x9k\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.019891 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-svc\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.019931 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.121953 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.122004 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.122038 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58x9k\" (UniqueName: \"kubernetes.io/projected/c57c7756-8fcd-43a3-8fdb-026c084f8f33-kube-api-access-58x9k\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.122100 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-svc\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.122126 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.122161 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.122181 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-config\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.123025 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-nb\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.123127 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-config\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.123277 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-svc\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.123319 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-swift-storage-0\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.123764 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-sb\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.124200 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-openstack-edpm-ipam\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.163436 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58x9k\" (UniqueName: \"kubernetes.io/projected/c57c7756-8fcd-43a3-8fdb-026c084f8f33-kube-api-access-58x9k\") pod \"dnsmasq-dns-67b789f86c-77wv7\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.261840 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-77wv7"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.390437 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.427952 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb8xm\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-kube-api-access-tb8xm\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428030 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-plugins\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428098 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-confd\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428184 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/831f0cec-e526-41e4-851f-139ffef9bea5-erlang-cookie-secret\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428294 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-plugins-conf\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428406 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428476 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-server-conf\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428533 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/831f0cec-e526-41e4-851f-139ffef9bea5-pod-info\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428578 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-erlang-cookie\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428623 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-config-data\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.428657 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-tls\") pod \"831f0cec-e526-41e4-851f-139ffef9bea5\" (UID: \"831f0cec-e526-41e4-851f-139ffef9bea5\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.430673 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.431995 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.432872 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.438284 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831f0cec-e526-41e4-851f-139ffef9bea5-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.438462 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.439426 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/831f0cec-e526-41e4-851f-139ffef9bea5-pod-info" (OuterVolumeSpecName: "pod-info") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.442794 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-kube-api-access-tb8xm" (OuterVolumeSpecName: "kube-api-access-tb8xm") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "kube-api-access-tb8xm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.445909 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.516414 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-config-data" (OuterVolumeSpecName: "config-data") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531440 4919 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/831f0cec-e526-41e4-851f-139ffef9bea5-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531474 4919 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-plugins-conf\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531499 4919 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531508 4919 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/831f0cec-e526-41e4-851f-139ffef9bea5-pod-info\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531519 4919 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531529 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531537 4919 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531545 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb8xm\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-kube-api-access-tb8xm\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.531553 4919 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.563484 4919 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.589189 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-server-conf" (OuterVolumeSpecName: "server-conf") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.636913 4919 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.637037 4919 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/831f0cec-e526-41e4-851f-139ffef9bea5-server-conf\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.661123 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.678324 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "831f0cec-e526-41e4-851f-139ffef9bea5" (UID: "831f0cec-e526-41e4-851f-139ffef9bea5"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.737961 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-confd\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738048 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-tls\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738083 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-plugins\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738120 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738154 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lszll\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-kube-api-access-lszll\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738205 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/567de3cf-1a4f-426d-b4d5-da78ead6e923-pod-info\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738249 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-config-data\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738371 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-plugins-conf\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738417 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/567de3cf-1a4f-426d-b4d5-da78ead6e923-erlang-cookie-secret\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738447 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-server-conf\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738475 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-erlang-cookie\") pod \"567de3cf-1a4f-426d-b4d5-da78ead6e923\" (UID: \"567de3cf-1a4f-426d-b4d5-da78ead6e923\") "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.738962 4919 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/831f0cec-e526-41e4-851f-139ffef9bea5-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.739189 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.743494 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.743697 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.744536 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.747548 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/567de3cf-1a4f-426d-b4d5-da78ead6e923-pod-info" (OuterVolumeSpecName: "pod-info") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.747563 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.747600 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-kube-api-access-lszll" (OuterVolumeSpecName: "kube-api-access-lszll") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "kube-api-access-lszll". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.747643 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/567de3cf-1a4f-426d-b4d5-da78ead6e923-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.769202 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-config-data" (OuterVolumeSpecName: "config-data") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.798007 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-server-conf" (OuterVolumeSpecName: "server-conf") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.841505 4919 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.841829 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lszll\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-kube-api-access-lszll\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.841929 4919 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/567de3cf-1a4f-426d-b4d5-da78ead6e923-pod-info\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.842139 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-config-data\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.842241 4919 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-plugins-conf\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.842346 4919 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/567de3cf-1a4f-426d-b4d5-da78ead6e923-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.842609 4919 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/567de3cf-1a4f-426d-b4d5-da78ead6e923-server-conf\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.842716 4919 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.842816 4919 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.842900 4919 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.865400 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "567de3cf-1a4f-426d-b4d5-da78ead6e923" (UID: "567de3cf-1a4f-426d-b4d5-da78ead6e923"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.869042 4919 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.944632 4919 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:29 crc kubenswrapper[4919]: I0930 20:35:29.944667 4919 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/567de3cf-1a4f-426d-b4d5-da78ead6e923-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.014201 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-77wv7"]
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.316724 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"831f0cec-e526-41e4-851f-139ffef9bea5","Type":"ContainerDied","Data":"fc485ce0ed183b024edf582d94bc1179466f05349f5295c49f9e92450845c274"}
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.317161 4919 scope.go:117] "RemoveContainer" containerID="38a5d3175d1cc4769ad239682d75e814c0498c53fc9db30bea0240c22b357552"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.316979 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.328333 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3"}
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.338796 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerStarted","Data":"ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc"}
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.342420 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"567de3cf-1a4f-426d-b4d5-da78ead6e923","Type":"ContainerDied","Data":"2cd648d520800c8309b5eafea58661f67c11f60b84c6ae967651d23bc59e6825"}
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.344693 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.352831 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" event={"ID":"c57c7756-8fcd-43a3-8fdb-026c084f8f33","Type":"ContainerStarted","Data":"cdc8cd267904dcdda5e160957fb71b130820fd2116a643379967ad715d0d7ef6"}
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.383976 4919 scope.go:117] "RemoveContainer" containerID="93e7c638bdaeea46130f5231c2e87f66af1eac382e728abedd402b1d49dd981f"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.429226 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.443394 4919 scope.go:117] "RemoveContainer" containerID="60c286ef96249077e4f4a7963fc707dec52b96f550928c462035531baa60c398"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.447037 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.470280 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.476163 4919 scope.go:117] "RemoveContainer" containerID="2a760f9f87e08a592631ecf86976cc2522b7c1236f4dead9c79de8addb7bc69a"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.485980 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.498761 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 20:35:30 crc kubenswrapper[4919]: E0930 20:35:30.499158 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" containerName="setup-container"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.499169 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" containerName="setup-container"
Sep 30 20:35:30 crc kubenswrapper[4919]: E0930 20:35:30.499196 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerName="setup-container"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.499202 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerName="setup-container"
Sep 30 20:35:30 crc kubenswrapper[4919]: E0930 20:35:30.499230 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerName="rabbitmq"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.499238 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerName="rabbitmq"
Sep 30 20:35:30 crc kubenswrapper[4919]: E0930 20:35:30.499258 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" containerName="rabbitmq"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.499264 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" containerName="rabbitmq"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.499424 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" containerName="rabbitmq"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.499435 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" containerName="rabbitmq"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.500392 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.512767 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.512976 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.513059 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.513166 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.513275 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.513306 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.513435 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rb9r4"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.513569 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.514901 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.521006 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.521361 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.521447 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-8dgqf"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.526453 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.526638 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.526753 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.526903 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.530295 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.553551 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.558617 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.558730 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.558825 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.558908 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.559007 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thqjv\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-kube-api-access-thqjv\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.559113 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.559203 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.559410 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n7w2\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-kube-api-access-7n7w2\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.559502 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.560307 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.560897 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.560979 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561116 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561201 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561300 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561392 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561479 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561544 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-config-data\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561603 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561662 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561722 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.561799 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.663060 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.663475 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.664370 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.664518 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.664645 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.664791 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.664915 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thqjv\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-kube-api-access-thqjv\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665053 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665169 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665246 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.664949 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665303 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n7w2\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-kube-api-access-7n7w2\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665369 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665393 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665432 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665453 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665482 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665504 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665526 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665533 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-server-conf\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665545 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665594 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665614 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-config-data\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665622 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0"
Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665634 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") "
pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665657 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.665672 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.666406 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.666597 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.666683 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.667580 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.668353 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-config-data\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.668628 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.668933 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.672588 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.672591 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.672904 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.672917 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.673085 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-pod-info\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.681127 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.681963 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.687860 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.687998 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thqjv\" (UniqueName: \"kubernetes.io/projected/743c3f7e-1714-48ce-85ba-bf201f5b1c8c-kube-api-access-thqjv\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.688085 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n7w2\" (UniqueName: \"kubernetes.io/projected/edc716fe-90af-4fa2-a733-d4c3fc3e76b9-kube-api-access-7n7w2\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 
20:35:30.697615 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"edc716fe-90af-4fa2-a733-d4c3fc3e76b9\") " pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.705965 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"743c3f7e-1714-48ce-85ba-bf201f5b1c8c\") " pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.831822 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 30 20:35:30 crc kubenswrapper[4919]: I0930 20:35:30.856582 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:35:31 crc kubenswrapper[4919]: I0930 20:35:31.346477 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 30 20:35:31 crc kubenswrapper[4919]: I0930 20:35:31.370730 4919 generic.go:334] "Generic (PLEG): container finished" podID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" containerID="869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059" exitCode=0 Sep 30 20:35:31 crc kubenswrapper[4919]: I0930 20:35:31.370922 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" event={"ID":"c57c7756-8fcd-43a3-8fdb-026c084f8f33","Type":"ContainerDied","Data":"869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059"} Sep 30 20:35:31 crc kubenswrapper[4919]: I0930 20:35:31.380765 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerStarted","Data":"5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e"} Sep 30 20:35:31 crc kubenswrapper[4919]: I0930 20:35:31.380877 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerStarted","Data":"3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c"} Sep 30 20:35:31 crc kubenswrapper[4919]: I0930 20:35:31.442066 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 30 20:35:31 crc kubenswrapper[4919]: I0930 20:35:31.642769 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="567de3cf-1a4f-426d-b4d5-da78ead6e923" path="/var/lib/kubelet/pods/567de3cf-1a4f-426d-b4d5-da78ead6e923/volumes" Sep 30 20:35:31 crc kubenswrapper[4919]: I0930 20:35:31.644101 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="831f0cec-e526-41e4-851f-139ffef9bea5" path="/var/lib/kubelet/pods/831f0cec-e526-41e4-851f-139ffef9bea5/volumes" Sep 30 20:35:32 crc kubenswrapper[4919]: I0930 20:35:32.393449 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"743c3f7e-1714-48ce-85ba-bf201f5b1c8c","Type":"ContainerStarted","Data":"348044aa384bf0501bc95eee2db22d961cbf5b5196b939a9fce25ffbb399eb59"} Sep 30 20:35:32 crc kubenswrapper[4919]: I0930 20:35:32.395415 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"edc716fe-90af-4fa2-a733-d4c3fc3e76b9","Type":"ContainerStarted","Data":"1e69628655ccd787ea19669eea211755d4f8554fcf60c916f6ac03bfbf2ddca6"} Sep 30 20:35:32 crc kubenswrapper[4919]: I0930 20:35:32.398176 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" event={"ID":"c57c7756-8fcd-43a3-8fdb-026c084f8f33","Type":"ContainerStarted","Data":"37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b"} Sep 30 20:35:32 crc kubenswrapper[4919]: I0930 20:35:32.398393 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" Sep 30 20:35:33 crc kubenswrapper[4919]: I0930 20:35:33.413737 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"edc716fe-90af-4fa2-a733-d4c3fc3e76b9","Type":"ContainerStarted","Data":"df815875ed774145089f488499287c233037f880fd0888aa0475110a1a3cfc06"} Sep 30 20:35:33 crc kubenswrapper[4919]: I0930 20:35:33.465815 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" podStartSLOduration=5.465786194 podStartE2EDuration="5.465786194s" podCreationTimestamp="2025-09-30 20:35:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:35:32.424423583 +0000 UTC m=+1317.540456720" watchObservedRunningTime="2025-09-30 20:35:33.465786194 +0000 UTC m=+1318.581819361" Sep 30 20:35:34 crc kubenswrapper[4919]: I0930 20:35:34.430147 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerStarted","Data":"f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec"} Sep 30 20:35:34 crc kubenswrapper[4919]: I0930 20:35:34.431067 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 20:35:34 crc kubenswrapper[4919]: I0930 20:35:34.433397 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"743c3f7e-1714-48ce-85ba-bf201f5b1c8c","Type":"ContainerStarted","Data":"672ffe5cce8c6ebee3b9689123238d4dab185c6c2828046a4f05b3c14394e3b1"} Sep 30 20:35:34 crc kubenswrapper[4919]: I0930 20:35:34.480562 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.012828074 podStartE2EDuration="16.480543208s" podCreationTimestamp="2025-09-30 20:35:18 +0000 UTC" firstStartedPulling="2025-09-30 20:35:19.085574615 +0000 UTC m=+1304.201607762" lastFinishedPulling="2025-09-30 20:35:32.553289749 +0000 UTC m=+1317.669322896" observedRunningTime="2025-09-30 20:35:34.462331677 +0000 UTC m=+1319.578364844" watchObservedRunningTime="2025-09-30 20:35:34.480543208 +0000 UTC m=+1319.596576345" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.263897 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.335842 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9x4kk"] Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.336552 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" podUID="38a281fa-ebba-4dab-92eb-26a591f96dc4" containerName="dnsmasq-dns" 
containerID="cri-o://b71bcd8bd2e9511a735ac627b3d781b42898ac5f98c3e179ec841e19fe76fa0a" gracePeriod=10 Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.510422 4919 generic.go:334] "Generic (PLEG): container finished" podID="38a281fa-ebba-4dab-92eb-26a591f96dc4" containerID="b71bcd8bd2e9511a735ac627b3d781b42898ac5f98c3e179ec841e19fe76fa0a" exitCode=0 Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.510461 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" event={"ID":"38a281fa-ebba-4dab-92eb-26a591f96dc4","Type":"ContainerDied","Data":"b71bcd8bd2e9511a735ac627b3d781b42898ac5f98c3e179ec841e19fe76fa0a"} Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.579202 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-lh6l6"] Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.580818 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.597934 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-lh6l6"] Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.755441 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-config\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.755510 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjr4r\" (UniqueName: \"kubernetes.io/projected/0329240c-67c8-4d59-97ee-17350f696ce2-kube-api-access-wjr4r\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.755564 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.755618 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.755637 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.756317 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: 
\"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.756346 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.857661 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-config\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.857969 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjr4r\" (UniqueName: \"kubernetes.io/projected/0329240c-67c8-4d59-97ee-17350f696ce2-kube-api-access-wjr4r\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.858001 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.858050 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.858076 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.858117 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.858142 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.858629 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-config\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " 
pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.858788 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-ovsdbserver-nb\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.859227 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-ovsdbserver-sb\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.859365 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-dns-swift-storage-0\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.859939 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-openstack-edpm-ipam\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.860096 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0329240c-67c8-4d59-97ee-17350f696ce2-dns-svc\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.879009 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjr4r\" (UniqueName: \"kubernetes.io/projected/0329240c-67c8-4d59-97ee-17350f696ce2-kube-api-access-wjr4r\") pod \"dnsmasq-dns-cb6ffcf87-lh6l6\" (UID: \"0329240c-67c8-4d59-97ee-17350f696ce2\") " pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.900608 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.928087 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.960148 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-svc\") pod \"38a281fa-ebba-4dab-92eb-26a591f96dc4\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.960325 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-swift-storage-0\") pod \"38a281fa-ebba-4dab-92eb-26a591f96dc4\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.960395 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-nb\") pod \"38a281fa-ebba-4dab-92eb-26a591f96dc4\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.960427 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-config\") pod \"38a281fa-ebba-4dab-92eb-26a591f96dc4\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.960528 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlkbc\" (UniqueName: \"kubernetes.io/projected/38a281fa-ebba-4dab-92eb-26a591f96dc4-kube-api-access-wlkbc\") pod \"38a281fa-ebba-4dab-92eb-26a591f96dc4\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.960571 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-sb\") pod \"38a281fa-ebba-4dab-92eb-26a591f96dc4\" (UID: \"38a281fa-ebba-4dab-92eb-26a591f96dc4\") " Sep 30 20:35:39 crc kubenswrapper[4919]: I0930 20:35:39.980927 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38a281fa-ebba-4dab-92eb-26a591f96dc4-kube-api-access-wlkbc" (OuterVolumeSpecName: "kube-api-access-wlkbc") pod "38a281fa-ebba-4dab-92eb-26a591f96dc4" (UID: "38a281fa-ebba-4dab-92eb-26a591f96dc4"). InnerVolumeSpecName "kube-api-access-wlkbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.040478 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "38a281fa-ebba-4dab-92eb-26a591f96dc4" (UID: "38a281fa-ebba-4dab-92eb-26a591f96dc4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.049102 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-config" (OuterVolumeSpecName: "config") pod "38a281fa-ebba-4dab-92eb-26a591f96dc4" (UID: "38a281fa-ebba-4dab-92eb-26a591f96dc4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.052117 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "38a281fa-ebba-4dab-92eb-26a591f96dc4" (UID: "38a281fa-ebba-4dab-92eb-26a591f96dc4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.061727 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.061761 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.061770 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlkbc\" (UniqueName: \"kubernetes.io/projected/38a281fa-ebba-4dab-92eb-26a591f96dc4-kube-api-access-wlkbc\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.061781 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.074348 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "38a281fa-ebba-4dab-92eb-26a591f96dc4" (UID: "38a281fa-ebba-4dab-92eb-26a591f96dc4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.098273 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "38a281fa-ebba-4dab-92eb-26a591f96dc4" (UID: "38a281fa-ebba-4dab-92eb-26a591f96dc4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.163296 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.163323 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/38a281fa-ebba-4dab-92eb-26a591f96dc4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.368436 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cb6ffcf87-lh6l6"] Sep 30 20:35:40 crc kubenswrapper[4919]: W0930 20:35:40.379924 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0329240c_67c8_4d59_97ee_17350f696ce2.slice/crio-b9e9322f89d5335cd37fff48f27ac62c7bc3ab63ae0badc92248bd9c5f6f126a WatchSource:0}: Error finding container b9e9322f89d5335cd37fff48f27ac62c7bc3ab63ae0badc92248bd9c5f6f126a: Status 404 returned error can't find the container with id b9e9322f89d5335cd37fff48f27ac62c7bc3ab63ae0badc92248bd9c5f6f126a Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.533464 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" event={"ID":"0329240c-67c8-4d59-97ee-17350f696ce2","Type":"ContainerStarted","Data":"b9e9322f89d5335cd37fff48f27ac62c7bc3ab63ae0badc92248bd9c5f6f126a"} Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.536583 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" event={"ID":"38a281fa-ebba-4dab-92eb-26a591f96dc4","Type":"ContainerDied","Data":"1054315d2935fda7dedf5b2f476d270539c7a7b265749686f2a443133812391d"} Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.536673 4919 scope.go:117] "RemoveContainer" containerID="b71bcd8bd2e9511a735ac627b3d781b42898ac5f98c3e179ec841e19fe76fa0a" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.536689 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-59cf4bdb65-9x4kk" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.600795 4919 scope.go:117] "RemoveContainer" containerID="502ff1a9c34510a92f87b0b2a26e84c4ff4ae3dcbb2ac4512b3c745db43be140" Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.621401 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9x4kk"] Sep 30 20:35:40 crc kubenswrapper[4919]: I0930 20:35:40.630620 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-59cf4bdb65-9x4kk"] Sep 30 20:35:41 crc kubenswrapper[4919]: I0930 20:35:41.551948 4919 generic.go:334] "Generic (PLEG): container finished" podID="0329240c-67c8-4d59-97ee-17350f696ce2" containerID="c101102f46ef22c062f3b9583f4a68a874119107860829e27290532766c9f01b" exitCode=0 Sep 30 20:35:41 crc kubenswrapper[4919]: I0930 20:35:41.552038 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" event={"ID":"0329240c-67c8-4d59-97ee-17350f696ce2","Type":"ContainerDied","Data":"c101102f46ef22c062f3b9583f4a68a874119107860829e27290532766c9f01b"} Sep 30 20:35:41 crc kubenswrapper[4919]: I0930 20:35:41.646329 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38a281fa-ebba-4dab-92eb-26a591f96dc4" path="/var/lib/kubelet/pods/38a281fa-ebba-4dab-92eb-26a591f96dc4/volumes" Sep 30 20:35:42 crc kubenswrapper[4919]: I0930 20:35:42.581325 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" event={"ID":"0329240c-67c8-4d59-97ee-17350f696ce2","Type":"ContainerStarted","Data":"e1679ac5ce9529ecb74949eed5a8fcbc41a26606779c2475ede5b51a220250bc"} Sep 30 20:35:42 crc kubenswrapper[4919]: I0930 20:35:42.581758 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:42 crc kubenswrapper[4919]: I0930 20:35:42.624626 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" podStartSLOduration=3.624597985 podStartE2EDuration="3.624597985s" podCreationTimestamp="2025-09-30 20:35:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:35:42.613195823 +0000 UTC m=+1327.729228980" watchObservedRunningTime="2025-09-30 20:35:42.624597985 +0000 UTC m=+1327.740631152" Sep 30 20:35:48 crc kubenswrapper[4919]: I0930 20:35:48.587595 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 20:35:49 crc kubenswrapper[4919]: I0930 20:35:49.902520 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cb6ffcf87-lh6l6" Sep 30 20:35:49 crc kubenswrapper[4919]: I0930 20:35:49.973184 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-77wv7"] Sep 30 20:35:49 crc kubenswrapper[4919]: I0930 20:35:49.973541 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" podUID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" containerName="dnsmasq-dns" containerID="cri-o://37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b" gracePeriod=10 Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.445677 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.632021 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-swift-storage-0\") pod \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.632146 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-svc\") pod \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.632960 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-openstack-edpm-ipam\") pod \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.633003 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58x9k\" (UniqueName: \"kubernetes.io/projected/c57c7756-8fcd-43a3-8fdb-026c084f8f33-kube-api-access-58x9k\") pod \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.633074 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-sb\") pod \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.633136 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-nb\") pod \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.633165 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-config\") pod \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\" (UID: \"c57c7756-8fcd-43a3-8fdb-026c084f8f33\") " Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.640979 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c57c7756-8fcd-43a3-8fdb-026c084f8f33-kube-api-access-58x9k" (OuterVolumeSpecName: "kube-api-access-58x9k") pod "c57c7756-8fcd-43a3-8fdb-026c084f8f33" (UID: "c57c7756-8fcd-43a3-8fdb-026c084f8f33"). InnerVolumeSpecName "kube-api-access-58x9k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.713735 4919 generic.go:334] "Generic (PLEG): container finished" podID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" containerID="37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b" exitCode=0 Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.713779 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" event={"ID":"c57c7756-8fcd-43a3-8fdb-026c084f8f33","Type":"ContainerDied","Data":"37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b"} Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.713812 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" event={"ID":"c57c7756-8fcd-43a3-8fdb-026c084f8f33","Type":"ContainerDied","Data":"cdc8cd267904dcdda5e160957fb71b130820fd2116a643379967ad715d0d7ef6"} Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.713830 4919 scope.go:117] "RemoveContainer" containerID="37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.714353 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67b789f86c-77wv7" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.719008 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c57c7756-8fcd-43a3-8fdb-026c084f8f33" (UID: "c57c7756-8fcd-43a3-8fdb-026c084f8f33"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.721682 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c57c7756-8fcd-43a3-8fdb-026c084f8f33" (UID: "c57c7756-8fcd-43a3-8fdb-026c084f8f33"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.731023 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c57c7756-8fcd-43a3-8fdb-026c084f8f33" (UID: "c57c7756-8fcd-43a3-8fdb-026c084f8f33"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.735698 4919 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.735724 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58x9k\" (UniqueName: \"kubernetes.io/projected/c57c7756-8fcd-43a3-8fdb-026c084f8f33-kube-api-access-58x9k\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.735734 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.735742 4919 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.737620 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-config" (OuterVolumeSpecName: "config") pod "c57c7756-8fcd-43a3-8fdb-026c084f8f33" (UID: "c57c7756-8fcd-43a3-8fdb-026c084f8f33"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.741814 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c57c7756-8fcd-43a3-8fdb-026c084f8f33" (UID: "c57c7756-8fcd-43a3-8fdb-026c084f8f33"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.764743 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "c57c7756-8fcd-43a3-8fdb-026c084f8f33" (UID: "c57c7756-8fcd-43a3-8fdb-026c084f8f33"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.799975 4919 scope.go:117] "RemoveContainer" containerID="869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.821325 4919 scope.go:117] "RemoveContainer" containerID="37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b" Sep 30 20:35:50 crc kubenswrapper[4919]: E0930 20:35:50.821931 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b\": container with ID starting with 37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b not found: ID does not exist" containerID="37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.821989 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b"} err="failed to get container status \"37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b\": rpc error: code = NotFound desc = could not find container \"37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b\": container with ID starting with 37a9c039e21ea821d30955987b1798bbfafc3bace1808aa5e72dad8964a5325b not found: ID does not exist" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.822023 4919 scope.go:117] "RemoveContainer" containerID="869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059" Sep 30 20:35:50 crc kubenswrapper[4919]: E0930 20:35:50.822651 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059\": container with ID starting with 869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059 not found: ID does not exist" containerID="869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.822683 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059"} err="failed to get container status \"869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059\": rpc error: code = NotFound desc = could not find container \"869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059\": container with ID starting with 869bbf79d77e0cfe6ce2fa4b886fb0534e18897c889a29ffc64ef3515eb55059 not found: ID does not exist" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.837365 4919 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.837401 4919 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:50 crc kubenswrapper[4919]: I0930 20:35:50.837413 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c57c7756-8fcd-43a3-8fdb-026c084f8f33-config\") on node \"crc\" DevicePath \"\"" Sep 30 20:35:51 crc 
kubenswrapper[4919]: I0930 20:35:51.060065 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-77wv7"] Sep 30 20:35:51 crc kubenswrapper[4919]: I0930 20:35:51.070640 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67b789f86c-77wv7"] Sep 30 20:35:51 crc kubenswrapper[4919]: I0930 20:35:51.644581 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" path="/var/lib/kubelet/pods/c57c7756-8fcd-43a3-8fdb-026c084f8f33/volumes" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.535452 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p"] Sep 30 20:36:04 crc kubenswrapper[4919]: E0930 20:36:04.537452 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38a281fa-ebba-4dab-92eb-26a591f96dc4" containerName="init" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.537472 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="38a281fa-ebba-4dab-92eb-26a591f96dc4" containerName="init" Sep 30 20:36:04 crc kubenswrapper[4919]: E0930 20:36:04.537525 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38a281fa-ebba-4dab-92eb-26a591f96dc4" containerName="dnsmasq-dns" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.537535 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="38a281fa-ebba-4dab-92eb-26a591f96dc4" containerName="dnsmasq-dns" Sep 30 20:36:04 crc kubenswrapper[4919]: E0930 20:36:04.537563 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" containerName="dnsmasq-dns" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.537573 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" containerName="dnsmasq-dns" Sep 30 20:36:04 crc kubenswrapper[4919]: E0930 20:36:04.537621 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" containerName="init" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.537629 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" containerName="init" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.538155 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="38a281fa-ebba-4dab-92eb-26a591f96dc4" containerName="dnsmasq-dns" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.538202 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c57c7756-8fcd-43a3-8fdb-026c084f8f33" containerName="dnsmasq-dns" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.539413 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.543975 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.544548 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.544891 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.545044 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.575548 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p"] Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.657984 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.658084 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcwn9\" (UniqueName: \"kubernetes.io/projected/73d4ec74-f76d-437b-b91b-dc0e75157be8-kube-api-access-fcwn9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.658130 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.658163 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.759835 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcwn9\" (UniqueName: \"kubernetes.io/projected/73d4ec74-f76d-437b-b91b-dc0e75157be8-kube-api-access-fcwn9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.759944 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.759989 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.760165 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.765719 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.766083 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.768028 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.786557 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcwn9\" (UniqueName: \"kubernetes.io/projected/73d4ec74-f76d-437b-b91b-dc0e75157be8-kube-api-access-fcwn9\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-n452p\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:04 crc kubenswrapper[4919]: I0930 20:36:04.859872 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:05 crc kubenswrapper[4919]: I0930 20:36:05.437804 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p"] Sep 30 20:36:05 crc kubenswrapper[4919]: I0930 20:36:05.918061 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" event={"ID":"73d4ec74-f76d-437b-b91b-dc0e75157be8","Type":"ContainerStarted","Data":"916ceaff90e58c09d0194aae788f8503c45d0c10f8781ea8ba9c6493b20c7678"} Sep 30 20:36:06 crc kubenswrapper[4919]: I0930 20:36:06.930549 4919 generic.go:334] "Generic (PLEG): container finished" podID="743c3f7e-1714-48ce-85ba-bf201f5b1c8c" containerID="672ffe5cce8c6ebee3b9689123238d4dab185c6c2828046a4f05b3c14394e3b1" exitCode=0 Sep 30 20:36:06 crc kubenswrapper[4919]: I0930 20:36:06.930864 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"743c3f7e-1714-48ce-85ba-bf201f5b1c8c","Type":"ContainerDied","Data":"672ffe5cce8c6ebee3b9689123238d4dab185c6c2828046a4f05b3c14394e3b1"} Sep 30 20:36:06 crc kubenswrapper[4919]: I0930 20:36:06.934391 4919 generic.go:334] "Generic (PLEG): container finished" podID="edc716fe-90af-4fa2-a733-d4c3fc3e76b9" containerID="df815875ed774145089f488499287c233037f880fd0888aa0475110a1a3cfc06" exitCode=0 Sep 30 20:36:06 crc kubenswrapper[4919]: I0930 20:36:06.934418 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"edc716fe-90af-4fa2-a733-d4c3fc3e76b9","Type":"ContainerDied","Data":"df815875ed774145089f488499287c233037f880fd0888aa0475110a1a3cfc06"} Sep 30 20:36:07 crc kubenswrapper[4919]: I0930 20:36:07.949211 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"edc716fe-90af-4fa2-a733-d4c3fc3e76b9","Type":"ContainerStarted","Data":"abe69d5df37fd61e618161a4a7e17a02db424ee1d1e4f56bc34d3c4acd9cc497"} Sep 30 20:36:07 crc kubenswrapper[4919]: I0930 20:36:07.949992 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 30 20:36:07 crc kubenswrapper[4919]: I0930 20:36:07.954538 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"743c3f7e-1714-48ce-85ba-bf201f5b1c8c","Type":"ContainerStarted","Data":"9ae715bc5b2be75548bd0734d11a7cd198576cab316b63606d33f2c5a3c174b4"} Sep 30 20:36:07 crc kubenswrapper[4919]: I0930 20:36:07.955069 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:36:07 crc kubenswrapper[4919]: I0930 20:36:07.985429 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.98541249 podStartE2EDuration="37.98541249s" podCreationTimestamp="2025-09-30 20:35:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:36:07.978156209 +0000 UTC m=+1353.094189366" watchObservedRunningTime="2025-09-30 20:36:07.98541249 +0000 UTC m=+1353.101445617" Sep 30 20:36:08 crc kubenswrapper[4919]: I0930 20:36:08.029073 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.029053552 podStartE2EDuration="38.029053552s" podCreationTimestamp="2025-09-30 20:35:30 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 20:36:08.001081217 +0000 UTC m=+1353.117114384" watchObservedRunningTime="2025-09-30 20:36:08.029053552 +0000 UTC m=+1353.145086769" Sep 30 20:36:17 crc kubenswrapper[4919]: I0930 20:36:17.060572 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" event={"ID":"73d4ec74-f76d-437b-b91b-dc0e75157be8","Type":"ContainerStarted","Data":"ecd7337d79425b7ce31589f1b36d529a27e9c43ecd5ebad969491e275e4ba8f8"} Sep 30 20:36:17 crc kubenswrapper[4919]: I0930 20:36:17.078807 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" podStartSLOduration=2.708385502 podStartE2EDuration="13.078789446s" podCreationTimestamp="2025-09-30 20:36:04 +0000 UTC" firstStartedPulling="2025-09-30 20:36:05.444189397 +0000 UTC m=+1350.560222564" lastFinishedPulling="2025-09-30 20:36:15.814593381 +0000 UTC m=+1360.930626508" observedRunningTime="2025-09-30 20:36:17.073048998 +0000 UTC m=+1362.189082135" watchObservedRunningTime="2025-09-30 20:36:17.078789446 +0000 UTC m=+1362.194822573" Sep 30 20:36:20 crc kubenswrapper[4919]: I0930 20:36:20.836122 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 30 20:36:20 crc kubenswrapper[4919]: I0930 20:36:20.859873 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 30 20:36:28 crc kubenswrapper[4919]: I0930 20:36:28.177114 4919 generic.go:334] "Generic (PLEG): container finished" podID="73d4ec74-f76d-437b-b91b-dc0e75157be8" containerID="ecd7337d79425b7ce31589f1b36d529a27e9c43ecd5ebad969491e275e4ba8f8" exitCode=0 Sep 30 20:36:28 crc kubenswrapper[4919]: I0930 20:36:28.177280 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" event={"ID":"73d4ec74-f76d-437b-b91b-dc0e75157be8","Type":"ContainerDied","Data":"ecd7337d79425b7ce31589f1b36d529a27e9c43ecd5ebad969491e275e4ba8f8"} Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.734778 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.754906 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcwn9\" (UniqueName: \"kubernetes.io/projected/73d4ec74-f76d-437b-b91b-dc0e75157be8-kube-api-access-fcwn9\") pod \"73d4ec74-f76d-437b-b91b-dc0e75157be8\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.755038 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-repo-setup-combined-ca-bundle\") pod \"73d4ec74-f76d-437b-b91b-dc0e75157be8\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.755184 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-ssh-key\") pod \"73d4ec74-f76d-437b-b91b-dc0e75157be8\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.755273 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-inventory\") pod \"73d4ec74-f76d-437b-b91b-dc0e75157be8\" (UID: \"73d4ec74-f76d-437b-b91b-dc0e75157be8\") " Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.820717 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73d4ec74-f76d-437b-b91b-dc0e75157be8-kube-api-access-fcwn9" (OuterVolumeSpecName: "kube-api-access-fcwn9") pod "73d4ec74-f76d-437b-b91b-dc0e75157be8" (UID: "73d4ec74-f76d-437b-b91b-dc0e75157be8"). InnerVolumeSpecName "kube-api-access-fcwn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.823333 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "73d4ec74-f76d-437b-b91b-dc0e75157be8" (UID: "73d4ec74-f76d-437b-b91b-dc0e75157be8"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.823575 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-inventory" (OuterVolumeSpecName: "inventory") pod "73d4ec74-f76d-437b-b91b-dc0e75157be8" (UID: "73d4ec74-f76d-437b-b91b-dc0e75157be8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.825040 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "73d4ec74-f76d-437b-b91b-dc0e75157be8" (UID: "73d4ec74-f76d-437b-b91b-dc0e75157be8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.882144 4919 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.882578 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.882792 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/73d4ec74-f76d-437b-b91b-dc0e75157be8-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:29 crc kubenswrapper[4919]: I0930 20:36:29.882952 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcwn9\" (UniqueName: \"kubernetes.io/projected/73d4ec74-f76d-437b-b91b-dc0e75157be8-kube-api-access-fcwn9\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.205413 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" event={"ID":"73d4ec74-f76d-437b-b91b-dc0e75157be8","Type":"ContainerDied","Data":"916ceaff90e58c09d0194aae788f8503c45d0c10f8781ea8ba9c6493b20c7678"} Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.205450 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="916ceaff90e58c09d0194aae788f8503c45d0c10f8781ea8ba9c6493b20c7678" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.205979 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-n452p" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.310689 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x"] Sep 30 20:36:30 crc kubenswrapper[4919]: E0930 20:36:30.311244 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73d4ec74-f76d-437b-b91b-dc0e75157be8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.311268 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="73d4ec74-f76d-437b-b91b-dc0e75157be8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.311539 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="73d4ec74-f76d-437b-b91b-dc0e75157be8" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.312397 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.315332 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.315983 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.315998 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.316400 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.320739 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x"] Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.390780 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.390866 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlr86\" (UniqueName: \"kubernetes.io/projected/99e35059-a993-4792-bad5-4bfb1615a04f-kube-api-access-hlr86\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.391119 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.492562 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.492735 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.492784 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlr86\" (UniqueName: \"kubernetes.io/projected/99e35059-a993-4792-bad5-4bfb1615a04f-kube-api-access-hlr86\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.497696 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.498023 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.510704 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlr86\" (UniqueName: \"kubernetes.io/projected/99e35059-a993-4792-bad5-4bfb1615a04f-kube-api-access-hlr86\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5vq8x\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:30 crc kubenswrapper[4919]: I0930 20:36:30.631626 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:31 crc kubenswrapper[4919]: I0930 20:36:31.332881 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x"] Sep 30 20:36:32 crc kubenswrapper[4919]: I0930 20:36:32.233956 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" event={"ID":"99e35059-a993-4792-bad5-4bfb1615a04f","Type":"ContainerStarted","Data":"33dda9d7096aa3bd0e35e81184a94c8d89d1c4285a7c4a9487178a00c1d0cb8e"} Sep 30 20:36:32 crc kubenswrapper[4919]: I0930 20:36:32.234463 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" event={"ID":"99e35059-a993-4792-bad5-4bfb1615a04f","Type":"ContainerStarted","Data":"99f789fb4b20e4b42ce064c5860fabad28839eb1d852caf5c228fa122d2bb5b4"} Sep 30 20:36:32 crc kubenswrapper[4919]: I0930 20:36:32.262740 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" podStartSLOduration=1.777818815 podStartE2EDuration="2.262714067s" podCreationTimestamp="2025-09-30 20:36:30 +0000 UTC" firstStartedPulling="2025-09-30 20:36:31.347774042 +0000 UTC m=+1376.463807169" lastFinishedPulling="2025-09-30 20:36:31.832669294 +0000 UTC m=+1376.948702421" observedRunningTime="2025-09-30 20:36:32.259519194 +0000 UTC m=+1377.375552361" watchObservedRunningTime="2025-09-30 20:36:32.262714067 +0000 UTC m=+1377.378747234" Sep 30 20:36:35 crc kubenswrapper[4919]: I0930 20:36:35.269664 4919 generic.go:334] "Generic (PLEG): container finished" podID="99e35059-a993-4792-bad5-4bfb1615a04f" containerID="33dda9d7096aa3bd0e35e81184a94c8d89d1c4285a7c4a9487178a00c1d0cb8e" exitCode=0 Sep 30 20:36:35 crc kubenswrapper[4919]: I0930 20:36:35.269775 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" 
event={"ID":"99e35059-a993-4792-bad5-4bfb1615a04f","Type":"ContainerDied","Data":"33dda9d7096aa3bd0e35e81184a94c8d89d1c4285a7c4a9487178a00c1d0cb8e"} Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.730614 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.874470 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-ssh-key\") pod \"99e35059-a993-4792-bad5-4bfb1615a04f\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.874905 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlr86\" (UniqueName: \"kubernetes.io/projected/99e35059-a993-4792-bad5-4bfb1615a04f-kube-api-access-hlr86\") pod \"99e35059-a993-4792-bad5-4bfb1615a04f\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.875035 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-inventory\") pod \"99e35059-a993-4792-bad5-4bfb1615a04f\" (UID: \"99e35059-a993-4792-bad5-4bfb1615a04f\") " Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.879682 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99e35059-a993-4792-bad5-4bfb1615a04f-kube-api-access-hlr86" (OuterVolumeSpecName: "kube-api-access-hlr86") pod "99e35059-a993-4792-bad5-4bfb1615a04f" (UID: "99e35059-a993-4792-bad5-4bfb1615a04f"). InnerVolumeSpecName "kube-api-access-hlr86". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.906181 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-inventory" (OuterVolumeSpecName: "inventory") pod "99e35059-a993-4792-bad5-4bfb1615a04f" (UID: "99e35059-a993-4792-bad5-4bfb1615a04f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.912898 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "99e35059-a993-4792-bad5-4bfb1615a04f" (UID: "99e35059-a993-4792-bad5-4bfb1615a04f"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.978584 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlr86\" (UniqueName: \"kubernetes.io/projected/99e35059-a993-4792-bad5-4bfb1615a04f-kube-api-access-hlr86\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.978639 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:36 crc kubenswrapper[4919]: I0930 20:36:36.978661 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/99e35059-a993-4792-bad5-4bfb1615a04f-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.296396 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" event={"ID":"99e35059-a993-4792-bad5-4bfb1615a04f","Type":"ContainerDied","Data":"99f789fb4b20e4b42ce064c5860fabad28839eb1d852caf5c228fa122d2bb5b4"} Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.296877 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99f789fb4b20e4b42ce064c5860fabad28839eb1d852caf5c228fa122d2bb5b4" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.296469 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5vq8x" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.385909 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg"] Sep 30 20:36:37 crc kubenswrapper[4919]: E0930 20:36:37.387030 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99e35059-a993-4792-bad5-4bfb1615a04f" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.387162 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="99e35059-a993-4792-bad5-4bfb1615a04f" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.387623 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="99e35059-a993-4792-bad5-4bfb1615a04f" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.388850 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.393696 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.394267 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.394823 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.394920 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.411287 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg"] Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.494509 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.494686 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.494763 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chh5p\" (UniqueName: \"kubernetes.io/projected/8ed94f60-cd6c-4559-879b-de97554383c6-kube-api-access-chh5p\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.494861 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.596732 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.596839 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chh5p\" (UniqueName: \"kubernetes.io/projected/8ed94f60-cd6c-4559-879b-de97554383c6-kube-api-access-chh5p\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.596929 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.596959 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.604961 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.606168 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.608295 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.619194 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chh5p\" (UniqueName: \"kubernetes.io/projected/8ed94f60-cd6c-4559-879b-de97554383c6-kube-api-access-chh5p\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:37 crc kubenswrapper[4919]: I0930 20:36:37.711188 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:36:38 crc kubenswrapper[4919]: I0930 20:36:38.360114 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg"] Sep 30 20:36:39 crc kubenswrapper[4919]: I0930 20:36:39.321546 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" event={"ID":"8ed94f60-cd6c-4559-879b-de97554383c6","Type":"ContainerStarted","Data":"d991b94221ede420d817b7a836c826f4e29b99a980884bdb9c8d9ac4c0bc0ae2"} Sep 30 20:36:39 crc kubenswrapper[4919]: I0930 20:36:39.322424 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" event={"ID":"8ed94f60-cd6c-4559-879b-de97554383c6","Type":"ContainerStarted","Data":"93faeec87c32a52fba948829f93ea77c8c642bda325eac55deb6731c6284cbfb"} Sep 30 20:36:39 crc kubenswrapper[4919]: I0930 20:36:39.348315 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" podStartSLOduration=1.948919096 podStartE2EDuration="2.348287975s" podCreationTimestamp="2025-09-30 20:36:37 +0000 UTC" firstStartedPulling="2025-09-30 20:36:38.36197722 +0000 UTC m=+1383.478010357" lastFinishedPulling="2025-09-30 20:36:38.761346099 +0000 UTC m=+1383.877379236" observedRunningTime="2025-09-30 20:36:39.342054603 +0000 UTC m=+1384.458087730" watchObservedRunningTime="2025-09-30 20:36:39.348287975 +0000 UTC m=+1384.464321102" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.064947 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fbrrz"] Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.068134 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.086623 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbrrz"] Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.181081 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-utilities\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.182364 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtzlf\" (UniqueName: \"kubernetes.io/projected/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-kube-api-access-mtzlf\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.182504 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-catalog-content\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.284820 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtzlf\" (UniqueName: \"kubernetes.io/projected/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-kube-api-access-mtzlf\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.284993 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-catalog-content\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.285102 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-utilities\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.285903 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-utilities\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.285931 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-catalog-content\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.315770 4919 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mtzlf\" (UniqueName: \"kubernetes.io/projected/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-kube-api-access-mtzlf\") pod \"redhat-marketplace-fbrrz\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.440775 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.938153 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbrrz"] Sep 30 20:37:32 crc kubenswrapper[4919]: I0930 20:37:32.976087 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbrrz" event={"ID":"8abd98bf-6d1f-48a7-ac70-09f4f9833aef","Type":"ContainerStarted","Data":"ea25fe2569d0897305ddc82aed421705e446a22e19cbd0626b61f7c79d429af1"} Sep 30 20:37:33 crc kubenswrapper[4919]: I0930 20:37:33.988909 4919 generic.go:334] "Generic (PLEG): container finished" podID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerID="da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f" exitCode=0 Sep 30 20:37:33 crc kubenswrapper[4919]: I0930 20:37:33.988973 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbrrz" event={"ID":"8abd98bf-6d1f-48a7-ac70-09f4f9833aef","Type":"ContainerDied","Data":"da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f"} Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.449993 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6hfwk"] Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.453021 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.465896 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6hfwk"] Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.584092 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-utilities\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.584607 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shzcs\" (UniqueName: \"kubernetes.io/projected/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-kube-api-access-shzcs\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.584913 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-catalog-content\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.687577 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-utilities\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.687655 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shzcs\" (UniqueName: \"kubernetes.io/projected/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-kube-api-access-shzcs\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.687834 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-catalog-content\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.688273 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-utilities\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.688338 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-catalog-content\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.707204 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-shzcs\" (UniqueName: \"kubernetes.io/projected/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-kube-api-access-shzcs\") pod \"redhat-operators-6hfwk\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:36 crc kubenswrapper[4919]: I0930 20:37:36.810543 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:37 crc kubenswrapper[4919]: I0930 20:37:37.025432 4919 generic.go:334] "Generic (PLEG): container finished" podID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerID="d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b" exitCode=0 Sep 30 20:37:37 crc kubenswrapper[4919]: I0930 20:37:37.025498 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbrrz" event={"ID":"8abd98bf-6d1f-48a7-ac70-09f4f9833aef","Type":"ContainerDied","Data":"d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b"} Sep 30 20:37:37 crc kubenswrapper[4919]: W0930 20:37:37.277819 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6597d2b2_b8f4_447c_b5dc_f1b9d04e3e1d.slice/crio-ebe0214884d512ff2157fc5a6dc34db073feed19d718daaedff7163281032d7d WatchSource:0}: Error finding container ebe0214884d512ff2157fc5a6dc34db073feed19d718daaedff7163281032d7d: Status 404 returned error can't find the container with id ebe0214884d512ff2157fc5a6dc34db073feed19d718daaedff7163281032d7d Sep 30 20:37:37 crc kubenswrapper[4919]: I0930 20:37:37.277935 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6hfwk"] Sep 30 20:37:37 crc kubenswrapper[4919]: I0930 20:37:37.588234 4919 scope.go:117] "RemoveContainer" containerID="f4d0b64baf8db1cdca9e217a3018da4c1ed4bacb6f0da768aabf52273e0b8039" Sep 30 20:37:37 crc kubenswrapper[4919]: I0930 20:37:37.649288 4919 scope.go:117] "RemoveContainer" containerID="3721b47eb484f189890c244c55ec3bf9ec1a039895753039a04e59791b73bf27" Sep 30 20:37:37 crc kubenswrapper[4919]: I0930 20:37:37.726791 4919 scope.go:117] "RemoveContainer" containerID="b14153c70f19e91da910b2b172f1f2b0ce92112a9f3379d578588ec39d00db9d" Sep 30 20:37:37 crc kubenswrapper[4919]: I0930 20:37:37.782795 4919 scope.go:117] "RemoveContainer" containerID="eb5b2c5fab103429a461f49d6ddd266b9242079e49e40d5c5e7e0793d6760d28" Sep 30 20:37:37 crc kubenswrapper[4919]: I0930 20:37:37.811515 4919 scope.go:117] "RemoveContainer" containerID="6764c23eea435c9d1cb7462a9d7b55aa5bd4466c48656175559734d83b4496c8" Sep 30 20:37:38 crc kubenswrapper[4919]: I0930 20:37:38.039200 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbrrz" event={"ID":"8abd98bf-6d1f-48a7-ac70-09f4f9833aef","Type":"ContainerStarted","Data":"b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251"} Sep 30 20:37:38 crc kubenswrapper[4919]: I0930 20:37:38.042416 4919 generic.go:334] "Generic (PLEG): container finished" podID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerID="f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9" exitCode=0 Sep 30 20:37:38 crc kubenswrapper[4919]: I0930 20:37:38.042465 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6hfwk" event={"ID":"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d","Type":"ContainerDied","Data":"f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9"} Sep 30 20:37:38 crc 
kubenswrapper[4919]: I0930 20:37:38.042484 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6hfwk" event={"ID":"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d","Type":"ContainerStarted","Data":"ebe0214884d512ff2157fc5a6dc34db073feed19d718daaedff7163281032d7d"} Sep 30 20:37:38 crc kubenswrapper[4919]: I0930 20:37:38.064804 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fbrrz" podStartSLOduration=2.620059703 podStartE2EDuration="6.064789767s" podCreationTimestamp="2025-09-30 20:37:32 +0000 UTC" firstStartedPulling="2025-09-30 20:37:33.991279341 +0000 UTC m=+1439.107312468" lastFinishedPulling="2025-09-30 20:37:37.436009405 +0000 UTC m=+1442.552042532" observedRunningTime="2025-09-30 20:37:38.059703339 +0000 UTC m=+1443.175736466" watchObservedRunningTime="2025-09-30 20:37:38.064789767 +0000 UTC m=+1443.180822894" Sep 30 20:37:40 crc kubenswrapper[4919]: I0930 20:37:40.072357 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6hfwk" event={"ID":"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d","Type":"ContainerStarted","Data":"2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9"} Sep 30 20:37:41 crc kubenswrapper[4919]: I0930 20:37:41.087871 4919 generic.go:334] "Generic (PLEG): container finished" podID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerID="2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9" exitCode=0 Sep 30 20:37:41 crc kubenswrapper[4919]: I0930 20:37:41.087991 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6hfwk" event={"ID":"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d","Type":"ContainerDied","Data":"2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9"} Sep 30 20:37:42 crc kubenswrapper[4919]: I0930 20:37:42.442194 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:42 crc kubenswrapper[4919]: I0930 20:37:42.443358 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:42 crc kubenswrapper[4919]: I0930 20:37:42.507262 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:43 crc kubenswrapper[4919]: I0930 20:37:43.112253 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6hfwk" event={"ID":"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d","Type":"ContainerStarted","Data":"c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa"} Sep 30 20:37:43 crc kubenswrapper[4919]: I0930 20:37:43.134107 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6hfwk" podStartSLOduration=3.6308408500000002 podStartE2EDuration="7.134089674s" podCreationTimestamp="2025-09-30 20:37:36 +0000 UTC" firstStartedPulling="2025-09-30 20:37:38.043921341 +0000 UTC m=+1443.159954468" lastFinishedPulling="2025-09-30 20:37:41.547170155 +0000 UTC m=+1446.663203292" observedRunningTime="2025-09-30 20:37:43.128070659 +0000 UTC m=+1448.244103796" watchObservedRunningTime="2025-09-30 20:37:43.134089674 +0000 UTC m=+1448.250122801" Sep 30 20:37:43 crc kubenswrapper[4919]: I0930 20:37:43.159405 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 
20:37:44 crc kubenswrapper[4919]: I0930 20:37:44.033948 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbrrz"] Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.134937 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fbrrz" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerName="registry-server" containerID="cri-o://b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251" gracePeriod=2 Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.651773 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.782842 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-catalog-content\") pod \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.783093 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-utilities\") pod \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.783333 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtzlf\" (UniqueName: \"kubernetes.io/projected/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-kube-api-access-mtzlf\") pod \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\" (UID: \"8abd98bf-6d1f-48a7-ac70-09f4f9833aef\") " Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.784310 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-utilities" (OuterVolumeSpecName: "utilities") pod "8abd98bf-6d1f-48a7-ac70-09f4f9833aef" (UID: "8abd98bf-6d1f-48a7-ac70-09f4f9833aef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.788162 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.790174 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-kube-api-access-mtzlf" (OuterVolumeSpecName: "kube-api-access-mtzlf") pod "8abd98bf-6d1f-48a7-ac70-09f4f9833aef" (UID: "8abd98bf-6d1f-48a7-ac70-09f4f9833aef"). InnerVolumeSpecName "kube-api-access-mtzlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.794638 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8abd98bf-6d1f-48a7-ac70-09f4f9833aef" (UID: "8abd98bf-6d1f-48a7-ac70-09f4f9833aef"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.890239 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:37:45 crc kubenswrapper[4919]: I0930 20:37:45.890290 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtzlf\" (UniqueName: \"kubernetes.io/projected/8abd98bf-6d1f-48a7-ac70-09f4f9833aef-kube-api-access-mtzlf\") on node \"crc\" DevicePath \"\"" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.150390 4919 generic.go:334] "Generic (PLEG): container finished" podID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerID="b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251" exitCode=0 Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.150486 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbrrz" event={"ID":"8abd98bf-6d1f-48a7-ac70-09f4f9833aef","Type":"ContainerDied","Data":"b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251"} Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.150611 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fbrrz" event={"ID":"8abd98bf-6d1f-48a7-ac70-09f4f9833aef","Type":"ContainerDied","Data":"ea25fe2569d0897305ddc82aed421705e446a22e19cbd0626b61f7c79d429af1"} Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.150651 4919 scope.go:117] "RemoveContainer" containerID="b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.150526 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fbrrz" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.205630 4919 scope.go:117] "RemoveContainer" containerID="d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.218146 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbrrz"] Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.232424 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fbrrz"] Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.243126 4919 scope.go:117] "RemoveContainer" containerID="da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.301063 4919 scope.go:117] "RemoveContainer" containerID="b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251" Sep 30 20:37:46 crc kubenswrapper[4919]: E0930 20:37:46.301676 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251\": container with ID starting with b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251 not found: ID does not exist" containerID="b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.301727 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251"} err="failed to get container status \"b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251\": rpc error: code = NotFound desc = could not find container \"b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251\": container with ID starting with b658f051f3b0d07aa1a14087b3a8ace53f3b0410aa07b82a0badbe6c6c879251 not found: ID does not exist" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.301765 4919 scope.go:117] "RemoveContainer" containerID="d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b" Sep 30 20:37:46 crc kubenswrapper[4919]: E0930 20:37:46.302301 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b\": container with ID starting with d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b not found: ID does not exist" containerID="d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.302357 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b"} err="failed to get container status \"d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b\": rpc error: code = NotFound desc = could not find container \"d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b\": container with ID starting with d2c6ea66f874d7584925ce1734d87585dc7920a2a4615414def81485cb227e8b not found: ID does not exist" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.302397 4919 scope.go:117] "RemoveContainer" containerID="da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f" Sep 30 20:37:46 crc kubenswrapper[4919]: E0930 20:37:46.303004 4919 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f\": container with ID starting with da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f not found: ID does not exist" containerID="da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.303050 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f"} err="failed to get container status \"da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f\": rpc error: code = NotFound desc = could not find container \"da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f\": container with ID starting with da1d567a095f3738e7b6766a9f085e9d7c70d898f5fce7fd2e7d390bc174b38f not found: ID does not exist" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.811309 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:46 crc kubenswrapper[4919]: I0930 20:37:46.811346 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:47 crc kubenswrapper[4919]: I0930 20:37:47.649030 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" path="/var/lib/kubelet/pods/8abd98bf-6d1f-48a7-ac70-09f4f9833aef/volumes" Sep 30 20:37:47 crc kubenswrapper[4919]: I0930 20:37:47.870121 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6hfwk" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="registry-server" probeResult="failure" output=< Sep 30 20:37:47 crc kubenswrapper[4919]: timeout: failed to connect service ":50051" within 1s Sep 30 20:37:47 crc kubenswrapper[4919]: > Sep 30 20:37:56 crc kubenswrapper[4919]: I0930 20:37:56.062233 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:37:56 crc kubenswrapper[4919]: I0930 20:37:56.063428 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:37:56 crc kubenswrapper[4919]: I0930 20:37:56.907883 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:56 crc kubenswrapper[4919]: I0930 20:37:56.966599 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:57 crc kubenswrapper[4919]: I0930 20:37:57.149245 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6hfwk"] Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.293392 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6hfwk" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="registry-server" 
containerID="cri-o://c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa" gracePeriod=2 Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.778773 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.819770 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-catalog-content\") pod \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.820292 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-utilities\") pod \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.820370 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shzcs\" (UniqueName: \"kubernetes.io/projected/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-kube-api-access-shzcs\") pod \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\" (UID: \"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d\") " Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.821308 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-utilities" (OuterVolumeSpecName: "utilities") pod "6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" (UID: "6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.821437 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.832497 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-kube-api-access-shzcs" (OuterVolumeSpecName: "kube-api-access-shzcs") pod "6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" (UID: "6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d"). InnerVolumeSpecName "kube-api-access-shzcs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.899875 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" (UID: "6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.922276 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:37:58 crc kubenswrapper[4919]: I0930 20:37:58.922311 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shzcs\" (UniqueName: \"kubernetes.io/projected/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d-kube-api-access-shzcs\") on node \"crc\" DevicePath \"\"" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.309255 4919 generic.go:334] "Generic (PLEG): container finished" podID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerID="c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa" exitCode=0 Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.309336 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6hfwk" event={"ID":"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d","Type":"ContainerDied","Data":"c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa"} Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.309354 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6hfwk" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.309401 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6hfwk" event={"ID":"6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d","Type":"ContainerDied","Data":"ebe0214884d512ff2157fc5a6dc34db073feed19d718daaedff7163281032d7d"} Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.309445 4919 scope.go:117] "RemoveContainer" containerID="c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.335129 4919 scope.go:117] "RemoveContainer" containerID="2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.361164 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6hfwk"] Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.367798 4919 scope.go:117] "RemoveContainer" containerID="f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.372007 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6hfwk"] Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.431614 4919 scope.go:117] "RemoveContainer" containerID="c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa" Sep 30 20:37:59 crc kubenswrapper[4919]: E0930 20:37:59.433625 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa\": container with ID starting with c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa not found: ID does not exist" containerID="c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.433680 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa"} err="failed to get container status \"c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa\": 
rpc error: code = NotFound desc = could not find container \"c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa\": container with ID starting with c81430b6fa6188e73be696fc0a809f7535ff506d584aa7e045fdcb24df700efa not found: ID does not exist" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.433716 4919 scope.go:117] "RemoveContainer" containerID="2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9" Sep 30 20:37:59 crc kubenswrapper[4919]: E0930 20:37:59.434186 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9\": container with ID starting with 2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9 not found: ID does not exist" containerID="2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.434258 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9"} err="failed to get container status \"2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9\": rpc error: code = NotFound desc = could not find container \"2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9\": container with ID starting with 2695de4a27059858a025fff5267c8625ee4c8766358253c929e443a62dfd62f9 not found: ID does not exist" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.434286 4919 scope.go:117] "RemoveContainer" containerID="f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9" Sep 30 20:37:59 crc kubenswrapper[4919]: E0930 20:37:59.434625 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9\": container with ID starting with f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9 not found: ID does not exist" containerID="f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.434681 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9"} err="failed to get container status \"f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9\": rpc error: code = NotFound desc = could not find container \"f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9\": container with ID starting with f2fe40cbb6bb0df5d612299b2f207bd2f473b131d2f73d14127ee136b236a5c9 not found: ID does not exist" Sep 30 20:37:59 crc kubenswrapper[4919]: I0930 20:37:59.653575 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" path="/var/lib/kubelet/pods/6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d/volumes" Sep 30 20:38:26 crc kubenswrapper[4919]: I0930 20:38:26.062587 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:38:26 crc kubenswrapper[4919]: I0930 20:38:26.063367 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" 
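[editor note] The volume lines above always appear in the same order per volume: "operationExecutor.UnmountVolume started", then "UnmountVolume.TearDown succeeded", then "Volume detached". A minimal sketch of that per-volume progression, assuming a toy state machine (the state names and reconcile function are illustrative, not the kubelet reconciler's real types):

package main

import "fmt"

type volState int

const (
	mounted volState = iota
	unmounting
	tornDown
	detached
)

// reconcile advances one volume through the three transitions logged above.
func reconcile(name string, s volState) volState {
	switch s {
	case mounted:
		fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", name)
		return unmounting
	case unmounting:
		fmt.Printf("UnmountVolume.TearDown succeeded for volume %q\n", name)
		return tornDown
	case tornDown:
		fmt.Printf("Volume detached for volume %q\n", name)
		return detached
	}
	return s
}

func main() {
	s := mounted
	for s != detached {
		s = reconcile("catalog-content", s)
	}
}

Only after the last transition ("Volume detached ... DevicePath \"\"") does the kubelet remove the orphaned volumes directory, which is the kubelet_volumes.go:163 line that follows.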
podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:38:37 crc kubenswrapper[4919]: I0930 20:38:37.961908 4919 scope.go:117] "RemoveContainer" containerID="f5d1214c9514f8da649a24e16ddb47e8ea8c9a384bdc26619aa32cd2c5a47859" Sep 30 20:38:38 crc kubenswrapper[4919]: I0930 20:38:38.000791 4919 scope.go:117] "RemoveContainer" containerID="1b43f029ee709a2a65c1eedfa14e9a41f60be52d43646e0d62b388321d3b3187" Sep 30 20:38:38 crc kubenswrapper[4919]: I0930 20:38:38.042971 4919 scope.go:117] "RemoveContainer" containerID="cf1b0adec0568e6b1fdb72c7df2e7d499b2b4460b23afc188bdbb3dab312725c" Sep 30 20:38:38 crc kubenswrapper[4919]: I0930 20:38:38.082373 4919 scope.go:117] "RemoveContainer" containerID="8298497be040e62e3183ad776599c51924b3293af2ebff9fb704621cffdb427d" Sep 30 20:38:38 crc kubenswrapper[4919]: I0930 20:38:38.121273 4919 scope.go:117] "RemoveContainer" containerID="5aea0a1c2c28d0e035cf58716118087ea692894503a86dd217f6d61d06ef79b0" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.791147 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t7pv7"] Sep 30 20:38:50 crc kubenswrapper[4919]: E0930 20:38:50.792489 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerName="extract-content" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.792513 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerName="extract-content" Sep 30 20:38:50 crc kubenswrapper[4919]: E0930 20:38:50.792541 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="registry-server" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.792554 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="registry-server" Sep 30 20:38:50 crc kubenswrapper[4919]: E0930 20:38:50.792599 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerName="extract-utilities" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.792613 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerName="extract-utilities" Sep 30 20:38:50 crc kubenswrapper[4919]: E0930 20:38:50.792655 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="extract-utilities" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.792668 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="extract-utilities" Sep 30 20:38:50 crc kubenswrapper[4919]: E0930 20:38:50.792693 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerName="registry-server" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.792705 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerName="registry-server" Sep 30 20:38:50 crc kubenswrapper[4919]: E0930 20:38:50.792735 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="extract-content" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.792747 4919 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="extract-content" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.793123 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="6597d2b2-b8f4-447c-b5dc-f1b9d04e3e1d" containerName="registry-server" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.793156 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8abd98bf-6d1f-48a7-ac70-09f4f9833aef" containerName="registry-server" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.795938 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.799073 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t7pv7"] Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.830383 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-utilities\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.830497 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sl2b\" (UniqueName: \"kubernetes.io/projected/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-kube-api-access-2sl2b\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.830599 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-catalog-content\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.932382 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-utilities\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.932457 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sl2b\" (UniqueName: \"kubernetes.io/projected/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-kube-api-access-2sl2b\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.932514 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-catalog-content\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.932915 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-utilities\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.933060 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-catalog-content\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:50 crc kubenswrapper[4919]: I0930 20:38:50.962490 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sl2b\" (UniqueName: \"kubernetes.io/projected/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-kube-api-access-2sl2b\") pod \"certified-operators-t7pv7\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:51 crc kubenswrapper[4919]: I0930 20:38:51.126052 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:38:51 crc kubenswrapper[4919]: I0930 20:38:51.718116 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t7pv7"] Sep 30 20:38:52 crc kubenswrapper[4919]: I0930 20:38:52.017180 4919 generic.go:334] "Generic (PLEG): container finished" podID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerID="1fef672291df678bc40ba89e7d1bfe0dfff1da2727bf4534dda9d22f6046a8b9" exitCode=0 Sep 30 20:38:52 crc kubenswrapper[4919]: I0930 20:38:52.017237 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7pv7" event={"ID":"c42a7ca7-a6d3-4b42-84ca-a5b294e96906","Type":"ContainerDied","Data":"1fef672291df678bc40ba89e7d1bfe0dfff1da2727bf4534dda9d22f6046a8b9"} Sep 30 20:38:52 crc kubenswrapper[4919]: I0930 20:38:52.017266 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7pv7" event={"ID":"c42a7ca7-a6d3-4b42-84ca-a5b294e96906","Type":"ContainerStarted","Data":"540741565e2fef042fdc43b150a452bc674c5adc63dd7f8a4e376f1d5880dacd"} Sep 30 20:38:52 crc kubenswrapper[4919]: I0930 20:38:52.019261 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:38:53 crc kubenswrapper[4919]: I0930 20:38:53.031667 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7pv7" event={"ID":"c42a7ca7-a6d3-4b42-84ca-a5b294e96906","Type":"ContainerStarted","Data":"fdb9d5b68d7a5f553a61404fb7e6e42892d831fb1ac94261e65d354beb54f702"} Sep 30 20:38:54 crc kubenswrapper[4919]: I0930 20:38:54.043879 4919 generic.go:334] "Generic (PLEG): container finished" podID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerID="fdb9d5b68d7a5f553a61404fb7e6e42892d831fb1ac94261e65d354beb54f702" exitCode=0 Sep 30 20:38:54 crc kubenswrapper[4919]: I0930 20:38:54.044071 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7pv7" event={"ID":"c42a7ca7-a6d3-4b42-84ca-a5b294e96906","Type":"ContainerDied","Data":"fdb9d5b68d7a5f553a61404fb7e6e42892d831fb1ac94261e65d354beb54f702"} Sep 30 20:38:55 crc kubenswrapper[4919]: I0930 20:38:55.060808 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7pv7" 
event={"ID":"c42a7ca7-a6d3-4b42-84ca-a5b294e96906","Type":"ContainerStarted","Data":"048f91ebc5eaf0d1a8a8b9b83854f8655d9ea7c9acdd258f1dd0ec1f4928c194"} Sep 30 20:38:55 crc kubenswrapper[4919]: I0930 20:38:55.097604 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t7pv7" podStartSLOduration=2.6866119939999997 podStartE2EDuration="5.097583097s" podCreationTimestamp="2025-09-30 20:38:50 +0000 UTC" firstStartedPulling="2025-09-30 20:38:52.018946194 +0000 UTC m=+1517.134979341" lastFinishedPulling="2025-09-30 20:38:54.429917277 +0000 UTC m=+1519.545950444" observedRunningTime="2025-09-30 20:38:55.089960845 +0000 UTC m=+1520.205994012" watchObservedRunningTime="2025-09-30 20:38:55.097583097 +0000 UTC m=+1520.213616234" Sep 30 20:38:56 crc kubenswrapper[4919]: I0930 20:38:56.062097 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:38:56 crc kubenswrapper[4919]: I0930 20:38:56.062542 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:38:56 crc kubenswrapper[4919]: I0930 20:38:56.062618 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:38:56 crc kubenswrapper[4919]: I0930 20:38:56.063696 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:38:56 crc kubenswrapper[4919]: I0930 20:38:56.063796 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" gracePeriod=600 Sep 30 20:38:56 crc kubenswrapper[4919]: E0930 20:38:56.196269 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:38:57 crc kubenswrapper[4919]: I0930 20:38:57.091129 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" exitCode=0 Sep 30 20:38:57 crc kubenswrapper[4919]: I0930 20:38:57.091239 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" 
event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3"} Sep 30 20:38:57 crc kubenswrapper[4919]: I0930 20:38:57.091678 4919 scope.go:117] "RemoveContainer" containerID="50b0f3b522dc60e1e18fadcde7bd6a100190635e5277992da10bb56412db1f04" Sep 30 20:38:57 crc kubenswrapper[4919]: I0930 20:38:57.092562 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:38:57 crc kubenswrapper[4919]: E0930 20:38:57.092993 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:39:01 crc kubenswrapper[4919]: I0930 20:39:01.127298 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:39:01 crc kubenswrapper[4919]: I0930 20:39:01.127784 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:39:01 crc kubenswrapper[4919]: I0930 20:39:01.202625 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:39:01 crc kubenswrapper[4919]: I0930 20:39:01.280819 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:39:01 crc kubenswrapper[4919]: I0930 20:39:01.456701 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t7pv7"] Sep 30 20:39:03 crc kubenswrapper[4919]: I0930 20:39:03.177919 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t7pv7" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerName="registry-server" containerID="cri-o://048f91ebc5eaf0d1a8a8b9b83854f8655d9ea7c9acdd258f1dd0ec1f4928c194" gracePeriod=2 Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.205838 4919 generic.go:334] "Generic (PLEG): container finished" podID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerID="048f91ebc5eaf0d1a8a8b9b83854f8655d9ea7c9acdd258f1dd0ec1f4928c194" exitCode=0 Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.206379 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7pv7" event={"ID":"c42a7ca7-a6d3-4b42-84ca-a5b294e96906","Type":"ContainerDied","Data":"048f91ebc5eaf0d1a8a8b9b83854f8655d9ea7c9acdd258f1dd0ec1f4928c194"} Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.662148 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.846614 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sl2b\" (UniqueName: \"kubernetes.io/projected/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-kube-api-access-2sl2b\") pod \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.846712 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-utilities\") pod \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.846988 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-catalog-content\") pod \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\" (UID: \"c42a7ca7-a6d3-4b42-84ca-a5b294e96906\") " Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.847946 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-utilities" (OuterVolumeSpecName: "utilities") pod "c42a7ca7-a6d3-4b42-84ca-a5b294e96906" (UID: "c42a7ca7-a6d3-4b42-84ca-a5b294e96906"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.857391 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-kube-api-access-2sl2b" (OuterVolumeSpecName: "kube-api-access-2sl2b") pod "c42a7ca7-a6d3-4b42-84ca-a5b294e96906" (UID: "c42a7ca7-a6d3-4b42-84ca-a5b294e96906"). InnerVolumeSpecName "kube-api-access-2sl2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.921599 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c42a7ca7-a6d3-4b42-84ca-a5b294e96906" (UID: "c42a7ca7-a6d3-4b42-84ca-a5b294e96906"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.948935 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.948972 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sl2b\" (UniqueName: \"kubernetes.io/projected/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-kube-api-access-2sl2b\") on node \"crc\" DevicePath \"\"" Sep 30 20:39:04 crc kubenswrapper[4919]: I0930 20:39:04.948984 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c42a7ca7-a6d3-4b42-84ca-a5b294e96906-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:39:05 crc kubenswrapper[4919]: I0930 20:39:05.226767 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t7pv7" event={"ID":"c42a7ca7-a6d3-4b42-84ca-a5b294e96906","Type":"ContainerDied","Data":"540741565e2fef042fdc43b150a452bc674c5adc63dd7f8a4e376f1d5880dacd"} Sep 30 20:39:05 crc kubenswrapper[4919]: I0930 20:39:05.226829 4919 scope.go:117] "RemoveContainer" containerID="048f91ebc5eaf0d1a8a8b9b83854f8655d9ea7c9acdd258f1dd0ec1f4928c194" Sep 30 20:39:05 crc kubenswrapper[4919]: I0930 20:39:05.226918 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t7pv7" Sep 30 20:39:05 crc kubenswrapper[4919]: I0930 20:39:05.270344 4919 scope.go:117] "RemoveContainer" containerID="fdb9d5b68d7a5f553a61404fb7e6e42892d831fb1ac94261e65d354beb54f702" Sep 30 20:39:05 crc kubenswrapper[4919]: I0930 20:39:05.280092 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t7pv7"] Sep 30 20:39:05 crc kubenswrapper[4919]: I0930 20:39:05.295686 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t7pv7"] Sep 30 20:39:05 crc kubenswrapper[4919]: I0930 20:39:05.306053 4919 scope.go:117] "RemoveContainer" containerID="1fef672291df678bc40ba89e7d1bfe0dfff1da2727bf4534dda9d22f6046a8b9" Sep 30 20:39:05 crc kubenswrapper[4919]: I0930 20:39:05.656883 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" path="/var/lib/kubelet/pods/c42a7ca7-a6d3-4b42-84ca-a5b294e96906/volumes" Sep 30 20:39:09 crc kubenswrapper[4919]: I0930 20:39:09.633050 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:39:09 crc kubenswrapper[4919]: E0930 20:39:09.633916 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:39:22 crc kubenswrapper[4919]: I0930 20:39:22.633027 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:39:22 crc kubenswrapper[4919]: E0930 20:39:22.633809 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
Sep 30 20:39:34 crc kubenswrapper[4919]: I0930 20:39:34.632840 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3"
Sep 30 20:39:34 crc kubenswrapper[4919]: E0930 20:39:34.633956 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10"
Sep 30 20:39:38 crc kubenswrapper[4919]: I0930 20:39:38.238013 4919 scope.go:117] "RemoveContainer" containerID="c68cd5f8cfa139223a59a999d0eca3a1b46126cfb5a9f5b3d2960a79d3bc97e5"
Sep 30 20:39:38 crc kubenswrapper[4919]: I0930 20:39:38.285895 4919 scope.go:117] "RemoveContainer" containerID="53f6654535ad83457c51707bd47410a0885cdebcf7a93a230c7b9a2010dc61cd"
Sep 30 20:39:38 crc kubenswrapper[4919]: I0930 20:39:38.316017 4919 scope.go:117] "RemoveContainer" containerID="ed46622a300dba9708a1f29da97c0bbd2aff1b4b843944464929611b34488b87"
Sep 30 20:39:48 crc kubenswrapper[4919]: I0930 20:39:48.633747 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3"
Sep 30 20:39:48 crc kubenswrapper[4919]: E0930 20:39:48.634824 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10"
Sep 30 20:39:56 crc kubenswrapper[4919]: I0930 20:39:56.819061 4919 generic.go:334] "Generic (PLEG): container finished" podID="8ed94f60-cd6c-4559-879b-de97554383c6" containerID="d991b94221ede420d817b7a836c826f4e29b99a980884bdb9c8d9ac4c0bc0ae2" exitCode=0
Sep 30 20:39:56 crc kubenswrapper[4919]: I0930 20:39:56.819159 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" event={"ID":"8ed94f60-cd6c-4559-879b-de97554383c6","Type":"ContainerDied","Data":"d991b94221ede420d817b7a836c826f4e29b99a980884bdb9c8d9ac4c0bc0ae2"}
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.329675 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg"
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.361074 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chh5p\" (UniqueName: \"kubernetes.io/projected/8ed94f60-cd6c-4559-879b-de97554383c6-kube-api-access-chh5p\") pod \"8ed94f60-cd6c-4559-879b-de97554383c6\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") "
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.361246 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-inventory\") pod \"8ed94f60-cd6c-4559-879b-de97554383c6\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") "
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.361357 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-ssh-key\") pod \"8ed94f60-cd6c-4559-879b-de97554383c6\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") "
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.361458 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-bootstrap-combined-ca-bundle\") pod \"8ed94f60-cd6c-4559-879b-de97554383c6\" (UID: \"8ed94f60-cd6c-4559-879b-de97554383c6\") "
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.368061 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ed94f60-cd6c-4559-879b-de97554383c6-kube-api-access-chh5p" (OuterVolumeSpecName: "kube-api-access-chh5p") pod "8ed94f60-cd6c-4559-879b-de97554383c6" (UID: "8ed94f60-cd6c-4559-879b-de97554383c6"). InnerVolumeSpecName "kube-api-access-chh5p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.373067 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "8ed94f60-cd6c-4559-879b-de97554383c6" (UID: "8ed94f60-cd6c-4559-879b-de97554383c6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.400676 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8ed94f60-cd6c-4559-879b-de97554383c6" (UID: "8ed94f60-cd6c-4559-879b-de97554383c6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.405790 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-inventory" (OuterVolumeSpecName: "inventory") pod "8ed94f60-cd6c-4559-879b-de97554383c6" (UID: "8ed94f60-cd6c-4559-879b-de97554383c6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.464495 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.464544 4919 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.464565 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chh5p\" (UniqueName: \"kubernetes.io/projected/8ed94f60-cd6c-4559-879b-de97554383c6-kube-api-access-chh5p\") on node \"crc\" DevicePath \"\""
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.464586 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8ed94f60-cd6c-4559-879b-de97554383c6-inventory\") on node \"crc\" DevicePath \"\""
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.849142 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" event={"ID":"8ed94f60-cd6c-4559-879b-de97554383c6","Type":"ContainerDied","Data":"93faeec87c32a52fba948829f93ea77c8c642bda325eac55deb6731c6284cbfb"}
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.849606 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93faeec87c32a52fba948829f93ea77c8c642bda325eac55deb6731c6284cbfb"
Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.849244 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg"
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.942988 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls"] Sep 30 20:39:58 crc kubenswrapper[4919]: E0930 20:39:58.943446 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ed94f60-cd6c-4559-879b-de97554383c6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.943468 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ed94f60-cd6c-4559-879b-de97554383c6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 20:39:58 crc kubenswrapper[4919]: E0930 20:39:58.943480 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerName="extract-utilities" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.943490 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerName="extract-utilities" Sep 30 20:39:58 crc kubenswrapper[4919]: E0930 20:39:58.943508 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerName="extract-content" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.943516 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerName="extract-content" Sep 30 20:39:58 crc kubenswrapper[4919]: E0930 20:39:58.943540 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerName="registry-server" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.943547 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerName="registry-server" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.943775 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c42a7ca7-a6d3-4b42-84ca-a5b294e96906" containerName="registry-server" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.943791 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ed94f60-cd6c-4559-879b-de97554383c6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.944437 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.947542 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.947786 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.948002 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.950203 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.961029 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls"] Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.982933 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.982997 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5r5h\" (UniqueName: \"kubernetes.io/projected/8d811837-df5f-49b4-bd4f-88bf57aa20b4-kube-api-access-h5r5h\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:58 crc kubenswrapper[4919]: I0930 20:39:58.983062 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.084578 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.084647 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5r5h\" (UniqueName: \"kubernetes.io/projected/8d811837-df5f-49b4-bd4f-88bf57aa20b4-kube-api-access-h5r5h\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.084698 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-ssh-key\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.089011 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.089912 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.100757 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5r5h\" (UniqueName: \"kubernetes.io/projected/8d811837-df5f-49b4-bd4f-88bf57aa20b4-kube-api-access-h5r5h\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s4wls\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.264605 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.613630 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls"] Sep 30 20:39:59 crc kubenswrapper[4919]: I0930 20:39:59.867651 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" event={"ID":"8d811837-df5f-49b4-bd4f-88bf57aa20b4","Type":"ContainerStarted","Data":"85fbd1f57cef20873f904cd60968a0f5194c415beb8952ee3bb90e3901c78fbb"} Sep 30 20:40:00 crc kubenswrapper[4919]: I0930 20:40:00.882764 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" event={"ID":"8d811837-df5f-49b4-bd4f-88bf57aa20b4","Type":"ContainerStarted","Data":"31a17750b47be99a03ebf6192d9f27d142aa63cc3ddc9ce39c4e350568924189"} Sep 30 20:40:00 crc kubenswrapper[4919]: I0930 20:40:00.913821 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" podStartSLOduration=2.419853984 podStartE2EDuration="2.91378969s" podCreationTimestamp="2025-09-30 20:39:58 +0000 UTC" firstStartedPulling="2025-09-30 20:39:59.621623251 +0000 UTC m=+1584.737656388" lastFinishedPulling="2025-09-30 20:40:00.115558927 +0000 UTC m=+1585.231592094" observedRunningTime="2025-09-30 20:40:00.900734341 +0000 UTC m=+1586.016767518" watchObservedRunningTime="2025-09-30 20:40:00.91378969 +0000 UTC m=+1586.029822907" Sep 30 20:40:01 crc kubenswrapper[4919]: I0930 20:40:01.632798 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:40:01 crc kubenswrapper[4919]: E0930 20:40:01.633897 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:40:15 crc kubenswrapper[4919]: I0930 20:40:15.652224 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:40:15 crc kubenswrapper[4919]: E0930 20:40:15.653967 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:40:30 crc kubenswrapper[4919]: I0930 20:40:30.637685 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:40:30 crc kubenswrapper[4919]: E0930 20:40:30.641470 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:40:38 crc kubenswrapper[4919]: I0930 20:40:38.392752 4919 scope.go:117] "RemoveContainer" containerID="dab6eff410e1122f1e9ba38e4079c77326c7e04f9bebfddcd0de72b1b18713f5" Sep 30 20:40:38 crc kubenswrapper[4919]: I0930 20:40:38.436379 4919 scope.go:117] "RemoveContainer" containerID="5248be8c9678cf75b910435823393b8144144bb14a7bb5e53bf3c0614d490337" Sep 30 20:40:38 crc kubenswrapper[4919]: I0930 20:40:38.509308 4919 scope.go:117] "RemoveContainer" containerID="8c7ff0a363b28206ddd30ee3a19f012450886a5f4f51516ef599d1e45f754eb8" Sep 30 20:40:38 crc kubenswrapper[4919]: I0930 20:40:38.540863 4919 scope.go:117] "RemoveContainer" containerID="5450878e4e9cde6fc7f510d20eab70bd7846d82a764214f69466f5d6fa3aefb8" Sep 30 20:40:44 crc kubenswrapper[4919]: I0930 20:40:44.632853 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:40:44 crc kubenswrapper[4919]: E0930 20:40:44.633664 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:40:45 crc kubenswrapper[4919]: I0930 20:40:45.091268 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-ztsl6"] Sep 30 20:40:45 crc kubenswrapper[4919]: I0930 20:40:45.102583 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-ztsl6"] Sep 30 20:40:45 crc kubenswrapper[4919]: I0930 20:40:45.647635 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="e6416142-17e9-4398-a65d-10dbcfc06411" path="/var/lib/kubelet/pods/e6416142-17e9-4398-a65d-10dbcfc06411/volumes" Sep 30 20:40:49 crc kubenswrapper[4919]: I0930 20:40:49.052772 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-9lx6b"] Sep 30 20:40:49 crc kubenswrapper[4919]: I0930 20:40:49.067572 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-f2fsn"] Sep 30 20:40:49 crc kubenswrapper[4919]: I0930 20:40:49.082097 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-9lx6b"] Sep 30 20:40:49 crc kubenswrapper[4919]: I0930 20:40:49.098333 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-f2fsn"] Sep 30 20:40:49 crc kubenswrapper[4919]: I0930 20:40:49.654111 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ea561dc-1efe-4e77-8b93-690b706e4125" path="/var/lib/kubelet/pods/5ea561dc-1efe-4e77-8b93-690b706e4125/volumes" Sep 30 20:40:49 crc kubenswrapper[4919]: I0930 20:40:49.655758 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61966895-d5a4-4b29-8177-623b9f37ae45" path="/var/lib/kubelet/pods/61966895-d5a4-4b29-8177-623b9f37ae45/volumes" Sep 30 20:40:56 crc kubenswrapper[4919]: I0930 20:40:56.056235 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-1482-account-create-lz8v2"] Sep 30 20:40:56 crc kubenswrapper[4919]: I0930 20:40:56.069459 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-1482-account-create-lz8v2"] Sep 30 20:40:57 crc kubenswrapper[4919]: I0930 20:40:57.653058 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4041a194-2042-411c-b58b-bec5e4ef9f2d" path="/var/lib/kubelet/pods/4041a194-2042-411c-b58b-bec5e4ef9f2d/volumes" Sep 30 20:40:59 crc kubenswrapper[4919]: I0930 20:40:59.044686 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-f84d-account-create-88k77"] Sep 30 20:40:59 crc kubenswrapper[4919]: I0930 20:40:59.056989 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-f84d-account-create-88k77"] Sep 30 20:40:59 crc kubenswrapper[4919]: I0930 20:40:59.633853 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:40:59 crc kubenswrapper[4919]: E0930 20:40:59.634479 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:40:59 crc kubenswrapper[4919]: I0930 20:40:59.648515 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63edfbc5-0bf7-48c8-87c3-94874e37e8d7" path="/var/lib/kubelet/pods/63edfbc5-0bf7-48c8-87c3-94874e37e8d7/volumes" Sep 30 20:41:00 crc kubenswrapper[4919]: I0930 20:41:00.025723 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-088d-account-create-lh9jg"] Sep 30 20:41:00 crc kubenswrapper[4919]: I0930 20:41:00.033806 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-088d-account-create-lh9jg"] Sep 30 20:41:01 crc kubenswrapper[4919]: I0930 20:41:01.642329 4919 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="2b424fd3-c3b3-4bbf-8583-9e5788c0038b" path="/var/lib/kubelet/pods/2b424fd3-c3b3-4bbf-8583-9e5788c0038b/volumes" Sep 30 20:41:14 crc kubenswrapper[4919]: I0930 20:41:14.632943 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:41:14 crc kubenswrapper[4919]: E0930 20:41:14.634262 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.048770 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-xp5t9"] Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.061882 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-wc9vm"] Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.069999 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-wc9vm"] Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.077997 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-4ppnv"] Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.087762 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-xp5t9"] Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.094673 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-4ppnv"] Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.646184 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06a29aeb-b634-4850-96ad-559fa2318076" path="/var/lib/kubelet/pods/06a29aeb-b634-4850-96ad-559fa2318076/volumes" Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.647296 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7" path="/var/lib/kubelet/pods/2cf7ed6e-7fd8-46e9-94ac-c6357e7dd7d7/volumes" Sep 30 20:41:25 crc kubenswrapper[4919]: I0930 20:41:25.647761 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8548a93e-e608-46bd-a4da-32876305fe67" path="/var/lib/kubelet/pods/8548a93e-e608-46bd-a4da-32876305fe67/volumes" Sep 30 20:41:29 crc kubenswrapper[4919]: I0930 20:41:29.051616 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-8zx8k"] Sep 30 20:41:29 crc kubenswrapper[4919]: I0930 20:41:29.065443 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-8zx8k"] Sep 30 20:41:29 crc kubenswrapper[4919]: I0930 20:41:29.632576 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:41:29 crc kubenswrapper[4919]: E0930 20:41:29.633022 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:41:29 crc kubenswrapper[4919]: 
I0930 20:41:29.649517 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15afefc9-4042-464a-ae52-966e5b6f0ffb" path="/var/lib/kubelet/pods/15afefc9-4042-464a-ae52-966e5b6f0ffb/volumes" Sep 30 20:41:30 crc kubenswrapper[4919]: I0930 20:41:30.035048 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-xxfbx"] Sep 30 20:41:30 crc kubenswrapper[4919]: I0930 20:41:30.044677 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-xxfbx"] Sep 30 20:41:31 crc kubenswrapper[4919]: I0930 20:41:31.656791 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbc82d06-dc8f-4fc1-884f-43213a1b4d36" path="/var/lib/kubelet/pods/bbc82d06-dc8f-4fc1-884f-43213a1b4d36/volumes" Sep 30 20:41:33 crc kubenswrapper[4919]: I0930 20:41:33.928501 4919 generic.go:334] "Generic (PLEG): container finished" podID="8d811837-df5f-49b4-bd4f-88bf57aa20b4" containerID="31a17750b47be99a03ebf6192d9f27d142aa63cc3ddc9ce39c4e350568924189" exitCode=0 Sep 30 20:41:33 crc kubenswrapper[4919]: I0930 20:41:33.928617 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" event={"ID":"8d811837-df5f-49b4-bd4f-88bf57aa20b4","Type":"ContainerDied","Data":"31a17750b47be99a03ebf6192d9f27d142aa63cc3ddc9ce39c4e350568924189"} Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.493469 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.677008 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-inventory\") pod \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.677065 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5r5h\" (UniqueName: \"kubernetes.io/projected/8d811837-df5f-49b4-bd4f-88bf57aa20b4-kube-api-access-h5r5h\") pod \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.677373 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-ssh-key\") pod \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\" (UID: \"8d811837-df5f-49b4-bd4f-88bf57aa20b4\") " Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.685969 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d811837-df5f-49b4-bd4f-88bf57aa20b4-kube-api-access-h5r5h" (OuterVolumeSpecName: "kube-api-access-h5r5h") pod "8d811837-df5f-49b4-bd4f-88bf57aa20b4" (UID: "8d811837-df5f-49b4-bd4f-88bf57aa20b4"). InnerVolumeSpecName "kube-api-access-h5r5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.718061 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8d811837-df5f-49b4-bd4f-88bf57aa20b4" (UID: "8d811837-df5f-49b4-bd4f-88bf57aa20b4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.729096 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-inventory" (OuterVolumeSpecName: "inventory") pod "8d811837-df5f-49b4-bd4f-88bf57aa20b4" (UID: "8d811837-df5f-49b4-bd4f-88bf57aa20b4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.797006 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.797039 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5r5h\" (UniqueName: \"kubernetes.io/projected/8d811837-df5f-49b4-bd4f-88bf57aa20b4-kube-api-access-h5r5h\") on node \"crc\" DevicePath \"\"" Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.797063 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8d811837-df5f-49b4-bd4f-88bf57aa20b4-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.960026 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" event={"ID":"8d811837-df5f-49b4-bd4f-88bf57aa20b4","Type":"ContainerDied","Data":"85fbd1f57cef20873f904cd60968a0f5194c415beb8952ee3bb90e3901c78fbb"} Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.960094 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85fbd1f57cef20873f904cd60968a0f5194c415beb8952ee3bb90e3901c78fbb" Sep 30 20:41:35 crc kubenswrapper[4919]: I0930 20:41:35.960136 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s4wls" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.036177 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8c41-account-create-crpcb"] Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.050291 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8c41-account-create-crpcb"] Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.072495 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf"] Sep 30 20:41:36 crc kubenswrapper[4919]: E0930 20:41:36.072922 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d811837-df5f-49b4-bd4f-88bf57aa20b4" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.072944 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d811837-df5f-49b4-bd4f-88bf57aa20b4" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.074375 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d811837-df5f-49b4-bd4f-88bf57aa20b4" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.075139 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.076991 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.077709 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.077959 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.079115 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.098794 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf"] Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.205445 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.205847 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n954\" (UniqueName: \"kubernetes.io/projected/27370bb4-04b4-4f01-b60d-e45c208a51a0-kube-api-access-4n954\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.205978 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.308681 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.308864 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n954\" (UniqueName: \"kubernetes.io/projected/27370bb4-04b4-4f01-b60d-e45c208a51a0-kube-api-access-4n954\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.308923 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-ssh-key\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.326489 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.326758 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.345941 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n954\" (UniqueName: \"kubernetes.io/projected/27370bb4-04b4-4f01-b60d-e45c208a51a0-kube-api-access-4n954\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.408667 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:41:36 crc kubenswrapper[4919]: I0930 20:41:36.973913 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf"] Sep 30 20:41:37 crc kubenswrapper[4919]: I0930 20:41:37.660483 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa111051-1cd6-4015-b901-d7247d9a6128" path="/var/lib/kubelet/pods/aa111051-1cd6-4015-b901-d7247d9a6128/volumes" Sep 30 20:41:37 crc kubenswrapper[4919]: I0930 20:41:37.985940 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" event={"ID":"27370bb4-04b4-4f01-b60d-e45c208a51a0","Type":"ContainerStarted","Data":"e93ade47af80db734669e2242fee6be70ef35ded94f0379987e87b1abfb89977"} Sep 30 20:41:37 crc kubenswrapper[4919]: I0930 20:41:37.987690 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" event={"ID":"27370bb4-04b4-4f01-b60d-e45c208a51a0","Type":"ContainerStarted","Data":"81a22cfebaf09a08fbb384fe0a65eba0d45959e78eac48930a2f455e3ffea552"} Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.007150 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" podStartSLOduration=1.476596327 podStartE2EDuration="2.007128645s" podCreationTimestamp="2025-09-30 20:41:36 +0000 UTC" firstStartedPulling="2025-09-30 20:41:36.97676192 +0000 UTC m=+1682.092795087" lastFinishedPulling="2025-09-30 20:41:37.507294268 +0000 UTC m=+1682.623327405" observedRunningTime="2025-09-30 20:41:38.002590073 +0000 UTC m=+1683.118623200" watchObservedRunningTime="2025-09-30 20:41:38.007128645 +0000 UTC m=+1683.123161782" Sep 30 20:41:38 crc 
kubenswrapper[4919]: I0930 20:41:38.632842 4919 scope.go:117] "RemoveContainer" containerID="f1279c802da8a7639ef68e428cae15e02484fda82bc523aa06d66c57bd0e6b00" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.666043 4919 scope.go:117] "RemoveContainer" containerID="63763b0f143e7d86d43bc46699ea91cd4ea82bfe9a534f66ef05b06aad53737d" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.728514 4919 scope.go:117] "RemoveContainer" containerID="8960f60e742ad16e0744f3e91c0ef748e0e41594619634fde7ad11556f711ce9" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.798991 4919 scope.go:117] "RemoveContainer" containerID="fdfdd656f1044b84a2301fd80854d1b556da700a1929d71dc30e9f26695bd605" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.824905 4919 scope.go:117] "RemoveContainer" containerID="f18973f3827b25a2c80ff87518e688a36ad51681aab6443b2d3f3241a54dbae1" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.871708 4919 scope.go:117] "RemoveContainer" containerID="87bf306e2d40a1cd713990bd45fe106cef7d87b6b8be2220edad388b98e99617" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.922835 4919 scope.go:117] "RemoveContainer" containerID="ebb9272c1b0f8828ce5ba0881c736cc29f60d950f43cdc9c0816043deea425fa" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.948417 4919 scope.go:117] "RemoveContainer" containerID="4130af02109cfee47a5aa40a8b2cda64efc03e92d7cc9e870ac57ad1811c299a" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.969041 4919 scope.go:117] "RemoveContainer" containerID="be303333ecd730045e05fec9bf10282740e6342a1d9d2a57cf045b4d90b731e1" Sep 30 20:41:38 crc kubenswrapper[4919]: I0930 20:41:38.994968 4919 scope.go:117] "RemoveContainer" containerID="30edca9fd6d10cfaadb28401f43560f9697b1c5720dabb4524ef279ae3d85d3e" Sep 30 20:41:39 crc kubenswrapper[4919]: I0930 20:41:39.029776 4919 scope.go:117] "RemoveContainer" containerID="679d3ff46779fc82e3def6d6fabf19e93627992c0c31b657073b2839d2708a5a" Sep 30 20:41:39 crc kubenswrapper[4919]: I0930 20:41:39.061043 4919 scope.go:117] "RemoveContainer" containerID="d965fe69f11531bdf4fab973c86209af712bd6280b255f2906cd980a346d06ea" Sep 30 20:41:41 crc kubenswrapper[4919]: I0930 20:41:41.049540 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-0dfd-account-create-cknrq"] Sep 30 20:41:41 crc kubenswrapper[4919]: I0930 20:41:41.064736 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7448-account-create-sz726"] Sep 30 20:41:41 crc kubenswrapper[4919]: I0930 20:41:41.079277 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-0dfd-account-create-cknrq"] Sep 30 20:41:41 crc kubenswrapper[4919]: I0930 20:41:41.089566 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7448-account-create-sz726"] Sep 30 20:41:41 crc kubenswrapper[4919]: I0930 20:41:41.650194 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="315fa417-7e9e-4c30-ac6e-3dd472837602" path="/var/lib/kubelet/pods/315fa417-7e9e-4c30-ac6e-3dd472837602/volumes" Sep 30 20:41:41 crc kubenswrapper[4919]: I0930 20:41:41.650893 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0" path="/var/lib/kubelet/pods/ce5e1f4a-1a65-4c03-bf3c-1f669b4731b0/volumes" Sep 30 20:41:44 crc kubenswrapper[4919]: I0930 20:41:44.633437 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:41:44 crc kubenswrapper[4919]: E0930 20:41:44.635776 4919 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:41:45 crc kubenswrapper[4919]: I0930 20:41:45.040570 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-vnrf4"] Sep 30 20:41:45 crc kubenswrapper[4919]: I0930 20:41:45.057042 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-vnrf4"] Sep 30 20:41:45 crc kubenswrapper[4919]: I0930 20:41:45.650070 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7db423ab-427e-425f-a5d4-10ec71302c12" path="/var/lib/kubelet/pods/7db423ab-427e-425f-a5d4-10ec71302c12/volumes" Sep 30 20:41:48 crc kubenswrapper[4919]: I0930 20:41:48.039539 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-gtwwt"] Sep 30 20:41:48 crc kubenswrapper[4919]: I0930 20:41:48.052666 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-gtwwt"] Sep 30 20:41:49 crc kubenswrapper[4919]: I0930 20:41:49.648658 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d738f62-6454-4ed9-a506-a3ffda2df598" path="/var/lib/kubelet/pods/9d738f62-6454-4ed9-a506-a3ffda2df598/volumes" Sep 30 20:41:58 crc kubenswrapper[4919]: I0930 20:41:58.632924 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:41:58 crc kubenswrapper[4919]: E0930 20:41:58.634270 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:41:59 crc kubenswrapper[4919]: I0930 20:41:59.068811 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-jphnh"] Sep 30 20:41:59 crc kubenswrapper[4919]: I0930 20:41:59.087972 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-jphnh"] Sep 30 20:41:59 crc kubenswrapper[4919]: I0930 20:41:59.654464 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39cdbc20-9bb1-4527-8195-f2b885c676a4" path="/var/lib/kubelet/pods/39cdbc20-9bb1-4527-8195-f2b885c676a4/volumes" Sep 30 20:42:09 crc kubenswrapper[4919]: I0930 20:42:09.070144 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-pl4gj"] Sep 30 20:42:09 crc kubenswrapper[4919]: I0930 20:42:09.084201 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-pl4gj"] Sep 30 20:42:09 crc kubenswrapper[4919]: I0930 20:42:09.642816 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="737cb8aa-63c3-4a59-893c-3d5075795304" path="/var/lib/kubelet/pods/737cb8aa-63c3-4a59-893c-3d5075795304/volumes" Sep 30 20:42:11 crc kubenswrapper[4919]: I0930 20:42:11.633070 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 
20:42:11 crc kubenswrapper[4919]: E0930 20:42:11.633908 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:42:25 crc kubenswrapper[4919]: I0930 20:42:25.645296 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:42:25 crc kubenswrapper[4919]: E0930 20:42:25.646726 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:42:34 crc kubenswrapper[4919]: I0930 20:42:34.047640 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-sp45k"] Sep 30 20:42:34 crc kubenswrapper[4919]: I0930 20:42:34.055421 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-sp45k"] Sep 30 20:42:35 crc kubenswrapper[4919]: I0930 20:42:35.663966 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d660eb4-7718-4b2b-a834-9b7d0d2b64a4" path="/var/lib/kubelet/pods/8d660eb4-7718-4b2b-a834-9b7d0d2b64a4/volumes" Sep 30 20:42:39 crc kubenswrapper[4919]: I0930 20:42:39.292624 4919 scope.go:117] "RemoveContainer" containerID="a93c041930d3298e860bfe7641781371461cc363eb119d276618c926fa30c0ca" Sep 30 20:42:39 crc kubenswrapper[4919]: I0930 20:42:39.340610 4919 scope.go:117] "RemoveContainer" containerID="08dc050f2ff3494df08cbc51f4613bf1a4cc13347e0567fdc628b577360b1de1" Sep 30 20:42:39 crc kubenswrapper[4919]: I0930 20:42:39.452284 4919 scope.go:117] "RemoveContainer" containerID="a2bd35679f61d726a7c65c57e3521c963095ad4b26c9ffc459f4eeae3093670c" Sep 30 20:42:39 crc kubenswrapper[4919]: I0930 20:42:39.495717 4919 scope.go:117] "RemoveContainer" containerID="86c4b320d58bfc6e95fb173638d692872c481fabdc18fde363cd7e6907de54f9" Sep 30 20:42:39 crc kubenswrapper[4919]: I0930 20:42:39.541761 4919 scope.go:117] "RemoveContainer" containerID="f2e6cd8e280ead561c63fbecc47f9cf11fdc13114c08ceefc02b347301650935" Sep 30 20:42:39 crc kubenswrapper[4919]: I0930 20:42:39.588985 4919 scope.go:117] "RemoveContainer" containerID="12a009d45ec0aac6c0db0e01f5c6af7d2ad76cf868bf9c3a91ca2368f49af468" Sep 30 20:42:39 crc kubenswrapper[4919]: I0930 20:42:39.618734 4919 scope.go:117] "RemoveContainer" containerID="8e4b6f20e5c822c9f587c4cac1efc4bb6ec2f33f860884d74dfb01efec1bfee7" Sep 30 20:42:39 crc kubenswrapper[4919]: I0930 20:42:39.633450 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:42:39 crc kubenswrapper[4919]: E0930 20:42:39.634252 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:42:53 crc kubenswrapper[4919]: I0930 20:42:53.634174 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:42:53 crc kubenswrapper[4919]: E0930 20:42:53.635590 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:42:58 crc kubenswrapper[4919]: I0930 20:42:58.937600 4919 generic.go:334] "Generic (PLEG): container finished" podID="27370bb4-04b4-4f01-b60d-e45c208a51a0" containerID="e93ade47af80db734669e2242fee6be70ef35ded94f0379987e87b1abfb89977" exitCode=0 Sep 30 20:42:58 crc kubenswrapper[4919]: I0930 20:42:58.937675 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" event={"ID":"27370bb4-04b4-4f01-b60d-e45c208a51a0","Type":"ContainerDied","Data":"e93ade47af80db734669e2242fee6be70ef35ded94f0379987e87b1abfb89977"} Sep 30 20:42:59 crc kubenswrapper[4919]: I0930 20:42:59.054302 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-qpzv7"] Sep 30 20:42:59 crc kubenswrapper[4919]: I0930 20:42:59.066596 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-5f5gt"] Sep 30 20:42:59 crc kubenswrapper[4919]: I0930 20:42:59.075085 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-5f5gt"] Sep 30 20:42:59 crc kubenswrapper[4919]: I0930 20:42:59.083517 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-qpzv7"] Sep 30 20:42:59 crc kubenswrapper[4919]: I0930 20:42:59.653880 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a78a5894-3d96-47fd-af15-15a6c66eb554" path="/var/lib/kubelet/pods/a78a5894-3d96-47fd-af15-15a6c66eb554/volumes" Sep 30 20:42:59 crc kubenswrapper[4919]: I0930 20:42:59.654534 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c442ecc0-a212-481d-add8-69ceb0c1cd1a" path="/var/lib/kubelet/pods/c442ecc0-a212-481d-add8-69ceb0c1cd1a/volumes" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.043743 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-hc97v"] Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.077255 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-hc97v"] Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.432382 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.494283 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-inventory\") pod \"27370bb4-04b4-4f01-b60d-e45c208a51a0\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.494376 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n954\" (UniqueName: \"kubernetes.io/projected/27370bb4-04b4-4f01-b60d-e45c208a51a0-kube-api-access-4n954\") pod \"27370bb4-04b4-4f01-b60d-e45c208a51a0\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.494563 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-ssh-key\") pod \"27370bb4-04b4-4f01-b60d-e45c208a51a0\" (UID: \"27370bb4-04b4-4f01-b60d-e45c208a51a0\") " Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.502933 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27370bb4-04b4-4f01-b60d-e45c208a51a0-kube-api-access-4n954" (OuterVolumeSpecName: "kube-api-access-4n954") pod "27370bb4-04b4-4f01-b60d-e45c208a51a0" (UID: "27370bb4-04b4-4f01-b60d-e45c208a51a0"). InnerVolumeSpecName "kube-api-access-4n954". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.527099 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-inventory" (OuterVolumeSpecName: "inventory") pod "27370bb4-04b4-4f01-b60d-e45c208a51a0" (UID: "27370bb4-04b4-4f01-b60d-e45c208a51a0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.543956 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "27370bb4-04b4-4f01-b60d-e45c208a51a0" (UID: "27370bb4-04b4-4f01-b60d-e45c208a51a0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.597505 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.597540 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27370bb4-04b4-4f01-b60d-e45c208a51a0-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.597553 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n954\" (UniqueName: \"kubernetes.io/projected/27370bb4-04b4-4f01-b60d-e45c208a51a0-kube-api-access-4n954\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.958768 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" event={"ID":"27370bb4-04b4-4f01-b60d-e45c208a51a0","Type":"ContainerDied","Data":"81a22cfebaf09a08fbb384fe0a65eba0d45959e78eac48930a2f455e3ffea552"} Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.958995 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81a22cfebaf09a08fbb384fe0a65eba0d45959e78eac48930a2f455e3ffea552" Sep 30 20:43:00 crc kubenswrapper[4919]: I0930 20:43:00.959065 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.063059 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4"] Sep 30 20:43:01 crc kubenswrapper[4919]: E0930 20:43:01.066341 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27370bb4-04b4-4f01-b60d-e45c208a51a0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.066390 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="27370bb4-04b4-4f01-b60d-e45c208a51a0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.066947 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="27370bb4-04b4-4f01-b60d-e45c208a51a0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.069616 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.074292 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.074605 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.075229 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.075277 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.095991 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4"] Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.208949 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c9jp\" (UniqueName: \"kubernetes.io/projected/b0af6183-c25a-420d-968c-73d8341d5547-kube-api-access-5c9jp\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.209314 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.209355 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.311875 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c9jp\" (UniqueName: \"kubernetes.io/projected/b0af6183-c25a-420d-968c-73d8341d5547-kube-api-access-5c9jp\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.311993 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.312039 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.322655 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.334689 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.335970 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c9jp\" (UniqueName: \"kubernetes.io/projected/b0af6183-c25a-420d-968c-73d8341d5547-kube-api-access-5c9jp\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.391110 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.655087 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="987212c4-8615-4a95-8779-768b5c0e0894" path="/var/lib/kubelet/pods/987212c4-8615-4a95-8779-768b5c0e0894/volumes" Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.953672 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4"] Sep 30 20:43:01 crc kubenswrapper[4919]: I0930 20:43:01.975512 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" event={"ID":"b0af6183-c25a-420d-968c-73d8341d5547","Type":"ContainerStarted","Data":"fb829b7a9121a9790f252b8f267622c61251e810998c8f2c38af1aa0ac9e5794"} Sep 30 20:43:02 crc kubenswrapper[4919]: I0930 20:43:02.986949 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" event={"ID":"b0af6183-c25a-420d-968c-73d8341d5547","Type":"ContainerStarted","Data":"72cd307e46883b6807136fa5ca19ccaeaba0ab7028e6b2d78f6ca117420ddf89"} Sep 30 20:43:03 crc kubenswrapper[4919]: I0930 20:43:03.017204 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" podStartSLOduration=1.376477943 podStartE2EDuration="2.017170089s" podCreationTimestamp="2025-09-30 20:43:01 +0000 UTC" firstStartedPulling="2025-09-30 20:43:01.962846531 +0000 UTC m=+1767.078879658" lastFinishedPulling="2025-09-30 20:43:02.603538637 +0000 UTC m=+1767.719571804" observedRunningTime="2025-09-30 20:43:03.005628816 +0000 UTC m=+1768.121662013" watchObservedRunningTime="2025-09-30 20:43:03.017170089 +0000 UTC m=+1768.133203256" Sep 30 20:43:05 crc kubenswrapper[4919]: I0930 
20:43:05.647309 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:43:05 crc kubenswrapper[4919]: E0930 20:43:05.648832 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:43:08 crc kubenswrapper[4919]: I0930 20:43:08.048013 4919 generic.go:334] "Generic (PLEG): container finished" podID="b0af6183-c25a-420d-968c-73d8341d5547" containerID="72cd307e46883b6807136fa5ca19ccaeaba0ab7028e6b2d78f6ca117420ddf89" exitCode=0 Sep 30 20:43:08 crc kubenswrapper[4919]: I0930 20:43:08.048159 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" event={"ID":"b0af6183-c25a-420d-968c-73d8341d5547","Type":"ContainerDied","Data":"72cd307e46883b6807136fa5ca19ccaeaba0ab7028e6b2d78f6ca117420ddf89"} Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.060401 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-16e9-account-create-h7pk9"] Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.070539 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-16e9-account-create-h7pk9"] Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.520160 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.593287 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-ssh-key\") pod \"b0af6183-c25a-420d-968c-73d8341d5547\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.593541 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-inventory\") pod \"b0af6183-c25a-420d-968c-73d8341d5547\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.593643 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c9jp\" (UniqueName: \"kubernetes.io/projected/b0af6183-c25a-420d-968c-73d8341d5547-kube-api-access-5c9jp\") pod \"b0af6183-c25a-420d-968c-73d8341d5547\" (UID: \"b0af6183-c25a-420d-968c-73d8341d5547\") " Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.598814 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0af6183-c25a-420d-968c-73d8341d5547-kube-api-access-5c9jp" (OuterVolumeSpecName: "kube-api-access-5c9jp") pod "b0af6183-c25a-420d-968c-73d8341d5547" (UID: "b0af6183-c25a-420d-968c-73d8341d5547"). InnerVolumeSpecName "kube-api-access-5c9jp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.620552 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b0af6183-c25a-420d-968c-73d8341d5547" (UID: "b0af6183-c25a-420d-968c-73d8341d5547"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.640515 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-inventory" (OuterVolumeSpecName: "inventory") pod "b0af6183-c25a-420d-968c-73d8341d5547" (UID: "b0af6183-c25a-420d-968c-73d8341d5547"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.647198 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9" path="/var/lib/kubelet/pods/a86fb69e-33fe-4ec9-b7ca-c5ef1bc859b9/volumes" Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.697929 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.697966 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0af6183-c25a-420d-968c-73d8341d5547-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:09 crc kubenswrapper[4919]: I0930 20:43:09.697997 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c9jp\" (UniqueName: \"kubernetes.io/projected/b0af6183-c25a-420d-968c-73d8341d5547-kube-api-access-5c9jp\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.043481 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-b360-account-create-p9sdw"] Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.062584 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-bf0f-account-create-l9kml"] Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.076250 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-b360-account-create-p9sdw"] Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.077711 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" event={"ID":"b0af6183-c25a-420d-968c-73d8341d5547","Type":"ContainerDied","Data":"fb829b7a9121a9790f252b8f267622c61251e810998c8f2c38af1aa0ac9e5794"} Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.077817 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb829b7a9121a9790f252b8f267622c61251e810998c8f2c38af1aa0ac9e5794" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.077771 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.087681 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-bf0f-account-create-l9kml"] Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.142591 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh"] Sep 30 20:43:10 crc kubenswrapper[4919]: E0930 20:43:10.142989 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0af6183-c25a-420d-968c-73d8341d5547" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.143005 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0af6183-c25a-420d-968c-73d8341d5547" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.143185 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0af6183-c25a-420d-968c-73d8341d5547" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.143828 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.146547 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.146706 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.146965 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.147102 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.156597 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh"] Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.211401 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.211448 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57pwp\" (UniqueName: \"kubernetes.io/projected/06ab2a0e-429a-46ef-9458-18c15c4142c3-kube-api-access-57pwp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.211656 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: 
\"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.313717 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.313838 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.313869 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57pwp\" (UniqueName: \"kubernetes.io/projected/06ab2a0e-429a-46ef-9458-18c15c4142c3-kube-api-access-57pwp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.318898 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.319386 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.333782 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57pwp\" (UniqueName: \"kubernetes.io/projected/06ab2a0e-429a-46ef-9458-18c15c4142c3-kube-api-access-57pwp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-tsxkh\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:10 crc kubenswrapper[4919]: I0930 20:43:10.468651 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:11 crc kubenswrapper[4919]: I0930 20:43:11.040492 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh"] Sep 30 20:43:11 crc kubenswrapper[4919]: I0930 20:43:11.091002 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" event={"ID":"06ab2a0e-429a-46ef-9458-18c15c4142c3","Type":"ContainerStarted","Data":"ccfa25b03b4265686bee036d3ebb1c69f0f8d6aad4a5c12d830c330462ecfd4a"} Sep 30 20:43:11 crc kubenswrapper[4919]: I0930 20:43:11.652619 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17ef649b-5bb6-4b14-934a-475366b7b842" path="/var/lib/kubelet/pods/17ef649b-5bb6-4b14-934a-475366b7b842/volumes" Sep 30 20:43:11 crc kubenswrapper[4919]: I0930 20:43:11.654608 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="315c9e3b-9f0f-41be-bff1-282740802b24" path="/var/lib/kubelet/pods/315c9e3b-9f0f-41be-bff1-282740802b24/volumes" Sep 30 20:43:12 crc kubenswrapper[4919]: I0930 20:43:12.108521 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" event={"ID":"06ab2a0e-429a-46ef-9458-18c15c4142c3","Type":"ContainerStarted","Data":"21ed5c29e314de1b792383de584a18ce72ff8cce02fc8817fb4ecfb9f426fdc0"} Sep 30 20:43:12 crc kubenswrapper[4919]: I0930 20:43:12.135068 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" podStartSLOduration=1.48615301 podStartE2EDuration="2.135049233s" podCreationTimestamp="2025-09-30 20:43:10 +0000 UTC" firstStartedPulling="2025-09-30 20:43:11.045864259 +0000 UTC m=+1776.161897416" lastFinishedPulling="2025-09-30 20:43:11.694760492 +0000 UTC m=+1776.810793639" observedRunningTime="2025-09-30 20:43:12.127751042 +0000 UTC m=+1777.243784179" watchObservedRunningTime="2025-09-30 20:43:12.135049233 +0000 UTC m=+1777.251082370" Sep 30 20:43:18 crc kubenswrapper[4919]: I0930 20:43:18.632629 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:43:18 crc kubenswrapper[4919]: E0930 20:43:18.633490 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:43:31 crc kubenswrapper[4919]: I0930 20:43:31.070903 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vbbrr"] Sep 30 20:43:31 crc kubenswrapper[4919]: I0930 20:43:31.083453 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-vbbrr"] Sep 30 20:43:31 crc kubenswrapper[4919]: I0930 20:43:31.643637 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c48fdc4f-d6dc-4f45-8c71-6ae82bece275" path="/var/lib/kubelet/pods/c48fdc4f-d6dc-4f45-8c71-6ae82bece275/volumes" Sep 30 20:43:33 crc kubenswrapper[4919]: I0930 20:43:33.633455 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 
30 20:43:33 crc kubenswrapper[4919]: E0930 20:43:33.634139 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:43:39 crc kubenswrapper[4919]: I0930 20:43:39.826828 4919 scope.go:117] "RemoveContainer" containerID="df33187181f3336afdb3910ec292c2fea8847c1544a0ae1878a0108fc70ec581" Sep 30 20:43:39 crc kubenswrapper[4919]: I0930 20:43:39.885379 4919 scope.go:117] "RemoveContainer" containerID="6ceebd0a18b1080dd1c41a198a44e9b9db57c5b4297cafd8664cb74182966cd7" Sep 30 20:43:39 crc kubenswrapper[4919]: I0930 20:43:39.952188 4919 scope.go:117] "RemoveContainer" containerID="16b47e5c0c0cc961ecbae1c136be515aa5a4d22385f0e293a0a08d31cd4b82e1" Sep 30 20:43:40 crc kubenswrapper[4919]: I0930 20:43:40.008934 4919 scope.go:117] "RemoveContainer" containerID="97963b2ee3b33c26b856319b87af07b393242e102cf5e6614fb525d828f3d874" Sep 30 20:43:40 crc kubenswrapper[4919]: I0930 20:43:40.056386 4919 scope.go:117] "RemoveContainer" containerID="d58d12d29e6f898761505a675ef3a1486fa32516182cac251985ca0cd9e5f286" Sep 30 20:43:40 crc kubenswrapper[4919]: I0930 20:43:40.100980 4919 scope.go:117] "RemoveContainer" containerID="ec58b669e72c316835d3e02244b0f8afdd0a38ffe265593c4a749d3ddfd5e1de" Sep 30 20:43:40 crc kubenswrapper[4919]: I0930 20:43:40.146351 4919 scope.go:117] "RemoveContainer" containerID="bcad8474bb62bf251e2a3de6999d78cc933c33f246eeb6eab5c4a28d7d386937" Sep 30 20:43:46 crc kubenswrapper[4919]: I0930 20:43:46.632609 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:43:46 crc kubenswrapper[4919]: E0930 20:43:46.633724 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:43:49 crc kubenswrapper[4919]: I0930 20:43:49.556395 4919 generic.go:334] "Generic (PLEG): container finished" podID="06ab2a0e-429a-46ef-9458-18c15c4142c3" containerID="21ed5c29e314de1b792383de584a18ce72ff8cce02fc8817fb4ecfb9f426fdc0" exitCode=0 Sep 30 20:43:49 crc kubenswrapper[4919]: I0930 20:43:49.556515 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" event={"ID":"06ab2a0e-429a-46ef-9458-18c15c4142c3","Type":"ContainerDied","Data":"21ed5c29e314de1b792383de584a18ce72ff8cce02fc8817fb4ecfb9f426fdc0"} Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.020141 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.134714 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-ssh-key\") pod \"06ab2a0e-429a-46ef-9458-18c15c4142c3\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.134757 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57pwp\" (UniqueName: \"kubernetes.io/projected/06ab2a0e-429a-46ef-9458-18c15c4142c3-kube-api-access-57pwp\") pod \"06ab2a0e-429a-46ef-9458-18c15c4142c3\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.134781 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-inventory\") pod \"06ab2a0e-429a-46ef-9458-18c15c4142c3\" (UID: \"06ab2a0e-429a-46ef-9458-18c15c4142c3\") " Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.162122 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06ab2a0e-429a-46ef-9458-18c15c4142c3-kube-api-access-57pwp" (OuterVolumeSpecName: "kube-api-access-57pwp") pod "06ab2a0e-429a-46ef-9458-18c15c4142c3" (UID: "06ab2a0e-429a-46ef-9458-18c15c4142c3"). InnerVolumeSpecName "kube-api-access-57pwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.179307 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "06ab2a0e-429a-46ef-9458-18c15c4142c3" (UID: "06ab2a0e-429a-46ef-9458-18c15c4142c3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.183963 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-inventory" (OuterVolumeSpecName: "inventory") pod "06ab2a0e-429a-46ef-9458-18c15c4142c3" (UID: "06ab2a0e-429a-46ef-9458-18c15c4142c3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.237121 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.237157 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57pwp\" (UniqueName: \"kubernetes.io/projected/06ab2a0e-429a-46ef-9458-18c15c4142c3-kube-api-access-57pwp\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.237169 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/06ab2a0e-429a-46ef-9458-18c15c4142c3-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.439281 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vhds4"] Sep 30 20:43:51 crc kubenswrapper[4919]: E0930 20:43:51.439990 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ab2a0e-429a-46ef-9458-18c15c4142c3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.440024 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ab2a0e-429a-46ef-9458-18c15c4142c3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.440437 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ab2a0e-429a-46ef-9458-18c15c4142c3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.443463 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.459005 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vhds4"] Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.549184 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-utilities\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.549266 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7d55\" (UniqueName: \"kubernetes.io/projected/c8ac998b-b264-461f-8c10-c55ddc647c29-kube-api-access-q7d55\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.549332 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-catalog-content\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.579343 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" event={"ID":"06ab2a0e-429a-46ef-9458-18c15c4142c3","Type":"ContainerDied","Data":"ccfa25b03b4265686bee036d3ebb1c69f0f8d6aad4a5c12d830c330462ecfd4a"} Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.579392 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccfa25b03b4265686bee036d3ebb1c69f0f8d6aad4a5c12d830c330462ecfd4a" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.579405 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-tsxkh" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.651328 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-utilities\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.651754 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7d55\" (UniqueName: \"kubernetes.io/projected/c8ac998b-b264-461f-8c10-c55ddc647c29-kube-api-access-q7d55\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.651826 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-catalog-content\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.652404 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-catalog-content\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.652645 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-utilities\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.678147 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7d55\" (UniqueName: \"kubernetes.io/projected/c8ac998b-b264-461f-8c10-c55ddc647c29-kube-api-access-q7d55\") pod \"community-operators-vhds4\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.694499 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng"] Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.695709 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.700775 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.701018 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.701199 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.701278 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.714775 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng"] Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.807262 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.857621 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.857665 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.857782 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tch6l\" (UniqueName: \"kubernetes.io/projected/998048ab-cfdd-4179-a40e-345b0d8a792d-kube-api-access-tch6l\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.962247 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.962512 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.962622 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-tch6l\" (UniqueName: \"kubernetes.io/projected/998048ab-cfdd-4179-a40e-345b0d8a792d-kube-api-access-tch6l\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.971102 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.971871 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:51 crc kubenswrapper[4919]: I0930 20:43:51.981243 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tch6l\" (UniqueName: \"kubernetes.io/projected/998048ab-cfdd-4179-a40e-345b0d8a792d-kube-api-access-tch6l\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-4vfng\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:52 crc kubenswrapper[4919]: I0930 20:43:52.021537 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:43:52 crc kubenswrapper[4919]: I0930 20:43:52.323775 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vhds4"] Sep 30 20:43:52 crc kubenswrapper[4919]: I0930 20:43:52.544081 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng"] Sep 30 20:43:52 crc kubenswrapper[4919]: I0930 20:43:52.588695 4919 generic.go:334] "Generic (PLEG): container finished" podID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerID="85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039" exitCode=0 Sep 30 20:43:52 crc kubenswrapper[4919]: I0930 20:43:52.588764 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhds4" event={"ID":"c8ac998b-b264-461f-8c10-c55ddc647c29","Type":"ContainerDied","Data":"85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039"} Sep 30 20:43:52 crc kubenswrapper[4919]: I0930 20:43:52.588835 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhds4" event={"ID":"c8ac998b-b264-461f-8c10-c55ddc647c29","Type":"ContainerStarted","Data":"45e1033b1017548056e1abda58218e7a648943c3469d9eb573b8c072b75d80a9"} Sep 30 20:43:52 crc kubenswrapper[4919]: W0930 20:43:52.590679 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod998048ab_cfdd_4179_a40e_345b0d8a792d.slice/crio-fc09b5a6c703d1d557b8160fe457ea6877694dbe618df68c0605e01dd18f02ce WatchSource:0}: Error finding container fc09b5a6c703d1d557b8160fe457ea6877694dbe618df68c0605e01dd18f02ce: Status 404 returned error can't find the container with id 
fc09b5a6c703d1d557b8160fe457ea6877694dbe618df68c0605e01dd18f02ce Sep 30 20:43:52 crc kubenswrapper[4919]: I0930 20:43:52.591184 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:43:53 crc kubenswrapper[4919]: I0930 20:43:53.035834 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-w646s"] Sep 30 20:43:53 crc kubenswrapper[4919]: I0930 20:43:53.043632 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-w646s"] Sep 30 20:43:53 crc kubenswrapper[4919]: I0930 20:43:53.604078 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" event={"ID":"998048ab-cfdd-4179-a40e-345b0d8a792d","Type":"ContainerStarted","Data":"a366a0d59510879f4d8084789c4b209bb92aaf2249eebd8e546ae2b20fa90d7e"} Sep 30 20:43:53 crc kubenswrapper[4919]: I0930 20:43:53.604469 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" event={"ID":"998048ab-cfdd-4179-a40e-345b0d8a792d","Type":"ContainerStarted","Data":"fc09b5a6c703d1d557b8160fe457ea6877694dbe618df68c0605e01dd18f02ce"} Sep 30 20:43:53 crc kubenswrapper[4919]: I0930 20:43:53.630957 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" podStartSLOduration=2.141663125 podStartE2EDuration="2.63093371s" podCreationTimestamp="2025-09-30 20:43:51 +0000 UTC" firstStartedPulling="2025-09-30 20:43:52.595841567 +0000 UTC m=+1817.711874694" lastFinishedPulling="2025-09-30 20:43:53.085112152 +0000 UTC m=+1818.201145279" observedRunningTime="2025-09-30 20:43:53.629861459 +0000 UTC m=+1818.745894646" watchObservedRunningTime="2025-09-30 20:43:53.63093371 +0000 UTC m=+1818.746966867" Sep 30 20:43:53 crc kubenswrapper[4919]: I0930 20:43:53.649962 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="222832c8-8bfd-460e-ae09-5594896b36fc" path="/var/lib/kubelet/pods/222832c8-8bfd-460e-ae09-5594896b36fc/volumes" Sep 30 20:43:54 crc kubenswrapper[4919]: I0930 20:43:54.035590 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-54fj2"] Sep 30 20:43:54 crc kubenswrapper[4919]: I0930 20:43:54.046597 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-54fj2"] Sep 30 20:43:54 crc kubenswrapper[4919]: I0930 20:43:54.621130 4919 generic.go:334] "Generic (PLEG): container finished" podID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerID="d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f" exitCode=0 Sep 30 20:43:54 crc kubenswrapper[4919]: I0930 20:43:54.621341 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhds4" event={"ID":"c8ac998b-b264-461f-8c10-c55ddc647c29","Type":"ContainerDied","Data":"d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f"} Sep 30 20:43:55 crc kubenswrapper[4919]: I0930 20:43:55.681393 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a83d10d-4a42-4177-b227-0da1b675c06b" path="/var/lib/kubelet/pods/1a83d10d-4a42-4177-b227-0da1b675c06b/volumes" Sep 30 20:43:55 crc kubenswrapper[4919]: I0930 20:43:55.682753 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhds4" 
event={"ID":"c8ac998b-b264-461f-8c10-c55ddc647c29","Type":"ContainerStarted","Data":"1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6"} Sep 30 20:43:55 crc kubenswrapper[4919]: I0930 20:43:55.683272 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vhds4" podStartSLOduration=2.155783451 podStartE2EDuration="4.683257677s" podCreationTimestamp="2025-09-30 20:43:51 +0000 UTC" firstStartedPulling="2025-09-30 20:43:52.590950916 +0000 UTC m=+1817.706984043" lastFinishedPulling="2025-09-30 20:43:55.118425132 +0000 UTC m=+1820.234458269" observedRunningTime="2025-09-30 20:43:55.682712991 +0000 UTC m=+1820.798746118" watchObservedRunningTime="2025-09-30 20:43:55.683257677 +0000 UTC m=+1820.799290804" Sep 30 20:43:57 crc kubenswrapper[4919]: I0930 20:43:57.632876 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:43:58 crc kubenswrapper[4919]: I0930 20:43:58.700394 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"9d3002085d98ad4d2cc90f8e71e2652fb9ade38e472b41de06b7d6dc5a2524c9"} Sep 30 20:44:01 crc kubenswrapper[4919]: I0930 20:44:01.808883 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:44:01 crc kubenswrapper[4919]: I0930 20:44:01.809644 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:44:01 crc kubenswrapper[4919]: I0930 20:44:01.875478 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:44:02 crc kubenswrapper[4919]: I0930 20:44:02.808988 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:44:02 crc kubenswrapper[4919]: I0930 20:44:02.853319 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vhds4"] Sep 30 20:44:04 crc kubenswrapper[4919]: I0930 20:44:04.765748 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vhds4" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerName="registry-server" containerID="cri-o://1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6" gracePeriod=2 Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.370878 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.464320 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7d55\" (UniqueName: \"kubernetes.io/projected/c8ac998b-b264-461f-8c10-c55ddc647c29-kube-api-access-q7d55\") pod \"c8ac998b-b264-461f-8c10-c55ddc647c29\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.464492 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-catalog-content\") pod \"c8ac998b-b264-461f-8c10-c55ddc647c29\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.464663 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-utilities\") pod \"c8ac998b-b264-461f-8c10-c55ddc647c29\" (UID: \"c8ac998b-b264-461f-8c10-c55ddc647c29\") " Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.465520 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-utilities" (OuterVolumeSpecName: "utilities") pod "c8ac998b-b264-461f-8c10-c55ddc647c29" (UID: "c8ac998b-b264-461f-8c10-c55ddc647c29"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.470408 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8ac998b-b264-461f-8c10-c55ddc647c29-kube-api-access-q7d55" (OuterVolumeSpecName: "kube-api-access-q7d55") pod "c8ac998b-b264-461f-8c10-c55ddc647c29" (UID: "c8ac998b-b264-461f-8c10-c55ddc647c29"). InnerVolumeSpecName "kube-api-access-q7d55". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.525323 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c8ac998b-b264-461f-8c10-c55ddc647c29" (UID: "c8ac998b-b264-461f-8c10-c55ddc647c29"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.566137 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.566171 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7d55\" (UniqueName: \"kubernetes.io/projected/c8ac998b-b264-461f-8c10-c55ddc647c29-kube-api-access-q7d55\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.566181 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8ac998b-b264-461f-8c10-c55ddc647c29-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.780429 4919 generic.go:334] "Generic (PLEG): container finished" podID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerID="1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6" exitCode=0 Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.780477 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhds4" event={"ID":"c8ac998b-b264-461f-8c10-c55ddc647c29","Type":"ContainerDied","Data":"1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6"} Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.780507 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhds4" event={"ID":"c8ac998b-b264-461f-8c10-c55ddc647c29","Type":"ContainerDied","Data":"45e1033b1017548056e1abda58218e7a648943c3469d9eb573b8c072b75d80a9"} Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.780528 4919 scope.go:117] "RemoveContainer" containerID="1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.780544 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vhds4" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.810530 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vhds4"] Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.825468 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vhds4"] Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.829098 4919 scope.go:117] "RemoveContainer" containerID="d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.854672 4919 scope.go:117] "RemoveContainer" containerID="85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.920070 4919 scope.go:117] "RemoveContainer" containerID="1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6" Sep 30 20:44:05 crc kubenswrapper[4919]: E0930 20:44:05.921310 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6\": container with ID starting with 1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6 not found: ID does not exist" containerID="1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.921361 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6"} err="failed to get container status \"1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6\": rpc error: code = NotFound desc = could not find container \"1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6\": container with ID starting with 1119e72bfef0cc158caf4369e9c4eac42bc430d06602e14f95a3cb6953a96de6 not found: ID does not exist" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.921399 4919 scope.go:117] "RemoveContainer" containerID="d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f" Sep 30 20:44:05 crc kubenswrapper[4919]: E0930 20:44:05.921758 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f\": container with ID starting with d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f not found: ID does not exist" containerID="d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.921840 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f"} err="failed to get container status \"d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f\": rpc error: code = NotFound desc = could not find container \"d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f\": container with ID starting with d23e51e8df585ef32ba27badd93a19b3ea946eacdae5c6b7519a30c7731ddf7f not found: ID does not exist" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.921923 4919 scope.go:117] "RemoveContainer" containerID="85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039" Sep 30 20:44:05 crc kubenswrapper[4919]: E0930 20:44:05.922261 4919 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039\": container with ID starting with 85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039 not found: ID does not exist" containerID="85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039" Sep 30 20:44:05 crc kubenswrapper[4919]: I0930 20:44:05.922299 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039"} err="failed to get container status \"85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039\": rpc error: code = NotFound desc = could not find container \"85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039\": container with ID starting with 85874cea649a99960d32c189c081256f27a023c27bce6dc478c5812ff27d8039 not found: ID does not exist" Sep 30 20:44:07 crc kubenswrapper[4919]: I0930 20:44:07.658435 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" path="/var/lib/kubelet/pods/c8ac998b-b264-461f-8c10-c55ddc647c29/volumes" Sep 30 20:44:38 crc kubenswrapper[4919]: I0930 20:44:38.058721 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-chwgq"] Sep 30 20:44:38 crc kubenswrapper[4919]: I0930 20:44:38.072642 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-chwgq"] Sep 30 20:44:39 crc kubenswrapper[4919]: I0930 20:44:39.648736 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9" path="/var/lib/kubelet/pods/6e4cbf93-6c40-4c11-b15e-a1193ff7b3f9/volumes" Sep 30 20:44:40 crc kubenswrapper[4919]: I0930 20:44:40.370791 4919 scope.go:117] "RemoveContainer" containerID="5868f30b7f6ec022bfb9b8c885b8347dd98ab0699e4f6389db1ced28b11c70d5" Sep 30 20:44:40 crc kubenswrapper[4919]: I0930 20:44:40.424979 4919 scope.go:117] "RemoveContainer" containerID="f75d695c2a138e615cd32ddb4cf978d927b46768cd76449d794758368125c09d" Sep 30 20:44:40 crc kubenswrapper[4919]: I0930 20:44:40.502398 4919 scope.go:117] "RemoveContainer" containerID="3c60a188c676999443de7167aedf505359dc147d85460d0aaf72e8e2261f60ed" Sep 30 20:44:51 crc kubenswrapper[4919]: I0930 20:44:51.288050 4919 generic.go:334] "Generic (PLEG): container finished" podID="998048ab-cfdd-4179-a40e-345b0d8a792d" containerID="a366a0d59510879f4d8084789c4b209bb92aaf2249eebd8e546ae2b20fa90d7e" exitCode=2 Sep 30 20:44:51 crc kubenswrapper[4919]: I0930 20:44:51.288132 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" event={"ID":"998048ab-cfdd-4179-a40e-345b0d8a792d","Type":"ContainerDied","Data":"a366a0d59510879f4d8084789c4b209bb92aaf2249eebd8e546ae2b20fa90d7e"} Sep 30 20:44:52 crc kubenswrapper[4919]: I0930 20:44:52.847998 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:44:52 crc kubenswrapper[4919]: I0930 20:44:52.914379 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tch6l\" (UniqueName: \"kubernetes.io/projected/998048ab-cfdd-4179-a40e-345b0d8a792d-kube-api-access-tch6l\") pod \"998048ab-cfdd-4179-a40e-345b0d8a792d\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " Sep 30 20:44:52 crc kubenswrapper[4919]: I0930 20:44:52.914652 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-ssh-key\") pod \"998048ab-cfdd-4179-a40e-345b0d8a792d\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " Sep 30 20:44:52 crc kubenswrapper[4919]: I0930 20:44:52.914855 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-inventory\") pod \"998048ab-cfdd-4179-a40e-345b0d8a792d\" (UID: \"998048ab-cfdd-4179-a40e-345b0d8a792d\") " Sep 30 20:44:52 crc kubenswrapper[4919]: I0930 20:44:52.925436 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/998048ab-cfdd-4179-a40e-345b0d8a792d-kube-api-access-tch6l" (OuterVolumeSpecName: "kube-api-access-tch6l") pod "998048ab-cfdd-4179-a40e-345b0d8a792d" (UID: "998048ab-cfdd-4179-a40e-345b0d8a792d"). InnerVolumeSpecName "kube-api-access-tch6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:44:52 crc kubenswrapper[4919]: I0930 20:44:52.949018 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "998048ab-cfdd-4179-a40e-345b0d8a792d" (UID: "998048ab-cfdd-4179-a40e-345b0d8a792d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:44:52 crc kubenswrapper[4919]: I0930 20:44:52.960440 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-inventory" (OuterVolumeSpecName: "inventory") pod "998048ab-cfdd-4179-a40e-345b0d8a792d" (UID: "998048ab-cfdd-4179-a40e-345b0d8a792d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:44:53 crc kubenswrapper[4919]: I0930 20:44:53.019582 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tch6l\" (UniqueName: \"kubernetes.io/projected/998048ab-cfdd-4179-a40e-345b0d8a792d-kube-api-access-tch6l\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:53 crc kubenswrapper[4919]: I0930 20:44:53.019797 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:53 crc kubenswrapper[4919]: I0930 20:44:53.019946 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/998048ab-cfdd-4179-a40e-345b0d8a792d-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:44:53 crc kubenswrapper[4919]: I0930 20:44:53.317778 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" event={"ID":"998048ab-cfdd-4179-a40e-345b0d8a792d","Type":"ContainerDied","Data":"fc09b5a6c703d1d557b8160fe457ea6877694dbe618df68c0605e01dd18f02ce"} Sep 30 20:44:53 crc kubenswrapper[4919]: I0930 20:44:53.317855 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc09b5a6c703d1d557b8160fe457ea6877694dbe618df68c0605e01dd18f02ce" Sep 30 20:44:53 crc kubenswrapper[4919]: I0930 20:44:53.317933 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-4vfng" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.151051 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt"] Sep 30 20:45:00 crc kubenswrapper[4919]: E0930 20:45:00.152081 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerName="extract-content" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.152098 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerName="extract-content" Sep 30 20:45:00 crc kubenswrapper[4919]: E0930 20:45:00.152119 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerName="registry-server" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.152126 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerName="registry-server" Sep 30 20:45:00 crc kubenswrapper[4919]: E0930 20:45:00.152142 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerName="extract-utilities" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.152151 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerName="extract-utilities" Sep 30 20:45:00 crc kubenswrapper[4919]: E0930 20:45:00.152173 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="998048ab-cfdd-4179-a40e-345b0d8a792d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.152181 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="998048ab-cfdd-4179-a40e-345b0d8a792d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.153672 4919 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="998048ab-cfdd-4179-a40e-345b0d8a792d" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.153712 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8ac998b-b264-461f-8c10-c55ddc647c29" containerName="registry-server" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.154440 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.156142 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.157588 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.165080 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt"] Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.204877 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/044cd7a9-23a6-469c-8b35-3d261825c797-secret-volume\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.204937 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/044cd7a9-23a6-469c-8b35-3d261825c797-config-volume\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.205296 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph2s9\" (UniqueName: \"kubernetes.io/projected/044cd7a9-23a6-469c-8b35-3d261825c797-kube-api-access-ph2s9\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.307601 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/044cd7a9-23a6-469c-8b35-3d261825c797-secret-volume\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.307647 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/044cd7a9-23a6-469c-8b35-3d261825c797-config-volume\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.307717 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph2s9\" (UniqueName: 
\"kubernetes.io/projected/044cd7a9-23a6-469c-8b35-3d261825c797-kube-api-access-ph2s9\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.308698 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/044cd7a9-23a6-469c-8b35-3d261825c797-config-volume\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.319804 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/044cd7a9-23a6-469c-8b35-3d261825c797-secret-volume\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.335769 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph2s9\" (UniqueName: \"kubernetes.io/projected/044cd7a9-23a6-469c-8b35-3d261825c797-kube-api-access-ph2s9\") pod \"collect-profiles-29321085-r6pjt\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:00 crc kubenswrapper[4919]: I0930 20:45:00.493973 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.000919 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt"] Sep 30 20:45:01 crc kubenswrapper[4919]: W0930 20:45:01.006529 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod044cd7a9_23a6_469c_8b35_3d261825c797.slice/crio-892d94fbf3250045b6ce95a78b3f590aa39825b504d7e1a446cab0a8aa27961d WatchSource:0}: Error finding container 892d94fbf3250045b6ce95a78b3f590aa39825b504d7e1a446cab0a8aa27961d: Status 404 returned error can't find the container with id 892d94fbf3250045b6ce95a78b3f590aa39825b504d7e1a446cab0a8aa27961d Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.056294 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78"] Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.058981 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.061290 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.061346 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.061600 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.061723 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.067867 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78"] Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.140516 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.140601 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.140835 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9v4j7\" (UniqueName: \"kubernetes.io/projected/18129bcf-0bdc-4437-83de-b9e5f20c66d3-kube-api-access-9v4j7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.242556 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.242874 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.242955 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9v4j7\" (UniqueName: \"kubernetes.io/projected/18129bcf-0bdc-4437-83de-b9e5f20c66d3-kube-api-access-9v4j7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" 
(UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.250034 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.252609 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.272584 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9v4j7\" (UniqueName: \"kubernetes.io/projected/18129bcf-0bdc-4437-83de-b9e5f20c66d3-kube-api-access-9v4j7\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-s8n78\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.414157 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.418185 4919 generic.go:334] "Generic (PLEG): container finished" podID="044cd7a9-23a6-469c-8b35-3d261825c797" containerID="3e18ff700a51cf5740a065e16a8424b2c89aee74fb38a32460905060a1479078" exitCode=0 Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.418276 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" event={"ID":"044cd7a9-23a6-469c-8b35-3d261825c797","Type":"ContainerDied","Data":"3e18ff700a51cf5740a065e16a8424b2c89aee74fb38a32460905060a1479078"} Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.418309 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" event={"ID":"044cd7a9-23a6-469c-8b35-3d261825c797","Type":"ContainerStarted","Data":"892d94fbf3250045b6ce95a78b3f590aa39825b504d7e1a446cab0a8aa27961d"} Sep 30 20:45:01 crc kubenswrapper[4919]: W0930 20:45:01.922599 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod18129bcf_0bdc_4437_83de_b9e5f20c66d3.slice/crio-c8225711e710ad1f699a64baad92ccd296af7232dcf5133d94ec23b3dcfe1069 WatchSource:0}: Error finding container c8225711e710ad1f699a64baad92ccd296af7232dcf5133d94ec23b3dcfe1069: Status 404 returned error can't find the container with id c8225711e710ad1f699a64baad92ccd296af7232dcf5133d94ec23b3dcfe1069 Sep 30 20:45:01 crc kubenswrapper[4919]: I0930 20:45:01.922839 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78"] Sep 30 20:45:02 crc kubenswrapper[4919]: I0930 20:45:02.436000 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" 
event={"ID":"18129bcf-0bdc-4437-83de-b9e5f20c66d3","Type":"ContainerStarted","Data":"c8225711e710ad1f699a64baad92ccd296af7232dcf5133d94ec23b3dcfe1069"} Sep 30 20:45:02 crc kubenswrapper[4919]: I0930 20:45:02.940892 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.082580 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph2s9\" (UniqueName: \"kubernetes.io/projected/044cd7a9-23a6-469c-8b35-3d261825c797-kube-api-access-ph2s9\") pod \"044cd7a9-23a6-469c-8b35-3d261825c797\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.082792 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/044cd7a9-23a6-469c-8b35-3d261825c797-secret-volume\") pod \"044cd7a9-23a6-469c-8b35-3d261825c797\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.082871 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/044cd7a9-23a6-469c-8b35-3d261825c797-config-volume\") pod \"044cd7a9-23a6-469c-8b35-3d261825c797\" (UID: \"044cd7a9-23a6-469c-8b35-3d261825c797\") " Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.087067 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/044cd7a9-23a6-469c-8b35-3d261825c797-config-volume" (OuterVolumeSpecName: "config-volume") pod "044cd7a9-23a6-469c-8b35-3d261825c797" (UID: "044cd7a9-23a6-469c-8b35-3d261825c797"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.090765 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/044cd7a9-23a6-469c-8b35-3d261825c797-kube-api-access-ph2s9" (OuterVolumeSpecName: "kube-api-access-ph2s9") pod "044cd7a9-23a6-469c-8b35-3d261825c797" (UID: "044cd7a9-23a6-469c-8b35-3d261825c797"). InnerVolumeSpecName "kube-api-access-ph2s9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.090918 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/044cd7a9-23a6-469c-8b35-3d261825c797-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "044cd7a9-23a6-469c-8b35-3d261825c797" (UID: "044cd7a9-23a6-469c-8b35-3d261825c797"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.185626 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph2s9\" (UniqueName: \"kubernetes.io/projected/044cd7a9-23a6-469c-8b35-3d261825c797-kube-api-access-ph2s9\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.185659 4919 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/044cd7a9-23a6-469c-8b35-3d261825c797-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.185669 4919 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/044cd7a9-23a6-469c-8b35-3d261825c797-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.455085 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" event={"ID":"18129bcf-0bdc-4437-83de-b9e5f20c66d3","Type":"ContainerStarted","Data":"a52558a80b1dc3315b48096867d7fb55e306a4eb205c7c8561cab7168de4ff8e"} Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.457477 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" event={"ID":"044cd7a9-23a6-469c-8b35-3d261825c797","Type":"ContainerDied","Data":"892d94fbf3250045b6ce95a78b3f590aa39825b504d7e1a446cab0a8aa27961d"} Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.457510 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="892d94fbf3250045b6ce95a78b3f590aa39825b504d7e1a446cab0a8aa27961d" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.457568 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321085-r6pjt" Sep 30 20:45:03 crc kubenswrapper[4919]: I0930 20:45:03.483052 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" podStartSLOduration=1.635376553 podStartE2EDuration="2.483029804s" podCreationTimestamp="2025-09-30 20:45:01 +0000 UTC" firstStartedPulling="2025-09-30 20:45:01.925633149 +0000 UTC m=+1887.041666286" lastFinishedPulling="2025-09-30 20:45:02.7732864 +0000 UTC m=+1887.889319537" observedRunningTime="2025-09-30 20:45:03.473813755 +0000 UTC m=+1888.589846882" watchObservedRunningTime="2025-09-30 20:45:03.483029804 +0000 UTC m=+1888.599062941" Sep 30 20:45:50 crc kubenswrapper[4919]: I0930 20:45:50.957070 4919 generic.go:334] "Generic (PLEG): container finished" podID="18129bcf-0bdc-4437-83de-b9e5f20c66d3" containerID="a52558a80b1dc3315b48096867d7fb55e306a4eb205c7c8561cab7168de4ff8e" exitCode=0 Sep 30 20:45:50 crc kubenswrapper[4919]: I0930 20:45:50.957476 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" event={"ID":"18129bcf-0bdc-4437-83de-b9e5f20c66d3","Type":"ContainerDied","Data":"a52558a80b1dc3315b48096867d7fb55e306a4eb205c7c8561cab7168de4ff8e"} Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.425958 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.517430 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-inventory\") pod \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.517630 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-ssh-key\") pod \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.517882 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9v4j7\" (UniqueName: \"kubernetes.io/projected/18129bcf-0bdc-4437-83de-b9e5f20c66d3-kube-api-access-9v4j7\") pod \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\" (UID: \"18129bcf-0bdc-4437-83de-b9e5f20c66d3\") " Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.523590 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18129bcf-0bdc-4437-83de-b9e5f20c66d3-kube-api-access-9v4j7" (OuterVolumeSpecName: "kube-api-access-9v4j7") pod "18129bcf-0bdc-4437-83de-b9e5f20c66d3" (UID: "18129bcf-0bdc-4437-83de-b9e5f20c66d3"). InnerVolumeSpecName "kube-api-access-9v4j7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.545164 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-inventory" (OuterVolumeSpecName: "inventory") pod "18129bcf-0bdc-4437-83de-b9e5f20c66d3" (UID: "18129bcf-0bdc-4437-83de-b9e5f20c66d3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.546847 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "18129bcf-0bdc-4437-83de-b9e5f20c66d3" (UID: "18129bcf-0bdc-4437-83de-b9e5f20c66d3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.619902 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.619947 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9v4j7\" (UniqueName: \"kubernetes.io/projected/18129bcf-0bdc-4437-83de-b9e5f20c66d3-kube-api-access-9v4j7\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.619958 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/18129bcf-0bdc-4437-83de-b9e5f20c66d3-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.983563 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" event={"ID":"18129bcf-0bdc-4437-83de-b9e5f20c66d3","Type":"ContainerDied","Data":"c8225711e710ad1f699a64baad92ccd296af7232dcf5133d94ec23b3dcfe1069"} Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.983613 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8225711e710ad1f699a64baad92ccd296af7232dcf5133d94ec23b3dcfe1069" Sep 30 20:45:52 crc kubenswrapper[4919]: I0930 20:45:52.983655 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-s8n78" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.081482 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xdv77"] Sep 30 20:45:53 crc kubenswrapper[4919]: E0930 20:45:53.081917 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="044cd7a9-23a6-469c-8b35-3d261825c797" containerName="collect-profiles" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.081936 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="044cd7a9-23a6-469c-8b35-3d261825c797" containerName="collect-profiles" Sep 30 20:45:53 crc kubenswrapper[4919]: E0930 20:45:53.081962 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18129bcf-0bdc-4437-83de-b9e5f20c66d3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.081971 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="18129bcf-0bdc-4437-83de-b9e5f20c66d3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.082204 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="18129bcf-0bdc-4437-83de-b9e5f20c66d3" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.082259 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="044cd7a9-23a6-469c-8b35-3d261825c797" containerName="collect-profiles" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.082963 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.084824 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.087560 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.087851 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.089490 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.104661 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xdv77"] Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.137474 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs5pg\" (UniqueName: \"kubernetes.io/projected/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-kube-api-access-qs5pg\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.137586 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.137676 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.239323 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.239748 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs5pg\" (UniqueName: \"kubernetes.io/projected/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-kube-api-access-qs5pg\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.240026 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc 
kubenswrapper[4919]: I0930 20:45:53.244423 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.244463 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.256330 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs5pg\" (UniqueName: \"kubernetes.io/projected/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-kube-api-access-qs5pg\") pod \"ssh-known-hosts-edpm-deployment-xdv77\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:53 crc kubenswrapper[4919]: I0930 20:45:53.437804 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:45:54 crc kubenswrapper[4919]: I0930 20:45:54.064320 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-xdv77"] Sep 30 20:45:55 crc kubenswrapper[4919]: I0930 20:45:55.007117 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" event={"ID":"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6","Type":"ContainerStarted","Data":"426238b6fcfd2595404cf8f74069c2212497f2af535a173d78c12ee03d3b2609"} Sep 30 20:45:56 crc kubenswrapper[4919]: I0930 20:45:56.017942 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" event={"ID":"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6","Type":"ContainerStarted","Data":"ee05eb76fd08b034ff15ed1bfa7a1ae4ee53d345ebdc4bdf624b7855253621ab"} Sep 30 20:45:56 crc kubenswrapper[4919]: I0930 20:45:56.041115 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" podStartSLOduration=2.34841354 podStartE2EDuration="3.041092156s" podCreationTimestamp="2025-09-30 20:45:53 +0000 UTC" firstStartedPulling="2025-09-30 20:45:54.070405293 +0000 UTC m=+1939.186438420" lastFinishedPulling="2025-09-30 20:45:54.763083869 +0000 UTC m=+1939.879117036" observedRunningTime="2025-09-30 20:45:56.033401612 +0000 UTC m=+1941.149434739" watchObservedRunningTime="2025-09-30 20:45:56.041092156 +0000 UTC m=+1941.157125283" Sep 30 20:46:03 crc kubenswrapper[4919]: I0930 20:46:03.084990 4919 generic.go:334] "Generic (PLEG): container finished" podID="ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6" containerID="ee05eb76fd08b034ff15ed1bfa7a1ae4ee53d345ebdc4bdf624b7855253621ab" exitCode=0 Sep 30 20:46:03 crc kubenswrapper[4919]: I0930 20:46:03.085030 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" event={"ID":"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6","Type":"ContainerDied","Data":"ee05eb76fd08b034ff15ed1bfa7a1ae4ee53d345ebdc4bdf624b7855253621ab"} Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.507163 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.594526 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs5pg\" (UniqueName: \"kubernetes.io/projected/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-kube-api-access-qs5pg\") pod \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.594655 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-inventory-0\") pod \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.594723 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-ssh-key-openstack-edpm-ipam\") pod \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\" (UID: \"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6\") " Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.600541 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-kube-api-access-qs5pg" (OuterVolumeSpecName: "kube-api-access-qs5pg") pod "ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6" (UID: "ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6"). InnerVolumeSpecName "kube-api-access-qs5pg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.624181 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6" (UID: "ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.627364 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6" (UID: "ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.697108 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs5pg\" (UniqueName: \"kubernetes.io/projected/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-kube-api-access-qs5pg\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.697318 4919 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:04 crc kubenswrapper[4919]: I0930 20:46:04.697379 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.107507 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" event={"ID":"ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6","Type":"ContainerDied","Data":"426238b6fcfd2595404cf8f74069c2212497f2af535a173d78c12ee03d3b2609"} Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.107548 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="426238b6fcfd2595404cf8f74069c2212497f2af535a173d78c12ee03d3b2609" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.107548 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-xdv77" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.186531 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc"] Sep 30 20:46:05 crc kubenswrapper[4919]: E0930 20:46:05.187296 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6" containerName="ssh-known-hosts-edpm-deployment" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.187320 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6" containerName="ssh-known-hosts-edpm-deployment" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.187587 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6" containerName="ssh-known-hosts-edpm-deployment" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.188388 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.195608 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.195655 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.195751 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.195782 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.206790 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.206835 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.206975 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql622\" (UniqueName: \"kubernetes.io/projected/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-kube-api-access-ql622\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.216257 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc"] Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.308327 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.308393 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.308577 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql622\" (UniqueName: \"kubernetes.io/projected/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-kube-api-access-ql622\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.313961 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.314470 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.327065 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql622\" (UniqueName: \"kubernetes.io/projected/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-kube-api-access-ql622\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-c5zkc\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.514581 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:05 crc kubenswrapper[4919]: I0930 20:46:05.900144 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc"] Sep 30 20:46:06 crc kubenswrapper[4919]: I0930 20:46:06.131736 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" event={"ID":"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb","Type":"ContainerStarted","Data":"cd883d4dc9b861ee068e431e14d315f0859b5a8fbb77e96750b7e5b21644c5ba"} Sep 30 20:46:07 crc kubenswrapper[4919]: I0930 20:46:07.141619 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" event={"ID":"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb","Type":"ContainerStarted","Data":"ecebb47548f39f51cff53272f78f3c9fdc38681c9fe4715e566805d8e62a232b"} Sep 30 20:46:07 crc kubenswrapper[4919]: I0930 20:46:07.167221 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" podStartSLOduration=1.67404784 podStartE2EDuration="2.167191589s" podCreationTimestamp="2025-09-30 20:46:05 +0000 UTC" firstStartedPulling="2025-09-30 20:46:05.907973721 +0000 UTC m=+1951.024006848" lastFinishedPulling="2025-09-30 20:46:06.40111747 +0000 UTC m=+1951.517150597" observedRunningTime="2025-09-30 20:46:07.160807273 +0000 UTC m=+1952.276840410" watchObservedRunningTime="2025-09-30 20:46:07.167191589 +0000 UTC m=+1952.283224716" Sep 30 20:46:16 crc kubenswrapper[4919]: I0930 20:46:16.242059 4919 generic.go:334] "Generic (PLEG): container finished" podID="d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb" containerID="ecebb47548f39f51cff53272f78f3c9fdc38681c9fe4715e566805d8e62a232b" exitCode=0 Sep 30 20:46:16 crc kubenswrapper[4919]: I0930 20:46:16.242499 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" 
event={"ID":"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb","Type":"ContainerDied","Data":"ecebb47548f39f51cff53272f78f3c9fdc38681c9fe4715e566805d8e62a232b"} Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.687860 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.761585 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-ssh-key\") pod \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.762568 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-inventory\") pod \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.762813 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ql622\" (UniqueName: \"kubernetes.io/projected/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-kube-api-access-ql622\") pod \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\" (UID: \"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb\") " Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.770357 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-kube-api-access-ql622" (OuterVolumeSpecName: "kube-api-access-ql622") pod "d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb" (UID: "d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb"). InnerVolumeSpecName "kube-api-access-ql622". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.795334 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb" (UID: "d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.802842 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-inventory" (OuterVolumeSpecName: "inventory") pod "d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb" (UID: "d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.864866 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.864901 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ql622\" (UniqueName: \"kubernetes.io/projected/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-kube-api-access-ql622\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:17 crc kubenswrapper[4919]: I0930 20:46:17.864912 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.263450 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" event={"ID":"d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb","Type":"ContainerDied","Data":"cd883d4dc9b861ee068e431e14d315f0859b5a8fbb77e96750b7e5b21644c5ba"} Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.263493 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd883d4dc9b861ee068e431e14d315f0859b5a8fbb77e96750b7e5b21644c5ba" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.263497 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-c5zkc" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.339796 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5"] Sep 30 20:46:18 crc kubenswrapper[4919]: E0930 20:46:18.340343 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.340369 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.340678 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.341476 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.344387 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.344693 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.344700 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.344841 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.352210 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5"] Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.475292 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.475736 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km8kt\" (UniqueName: \"kubernetes.io/projected/4d98ac92-d78b-4df7-82bd-430f274f9ee0-kube-api-access-km8kt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.475861 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.577573 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.577707 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km8kt\" (UniqueName: \"kubernetes.io/projected/4d98ac92-d78b-4df7-82bd-430f274f9ee0-kube-api-access-km8kt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.577731 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: 
\"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.583937 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.587473 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.598632 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km8kt\" (UniqueName: \"kubernetes.io/projected/4d98ac92-d78b-4df7-82bd-430f274f9ee0-kube-api-access-km8kt\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:18 crc kubenswrapper[4919]: I0930 20:46:18.665596 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:19 crc kubenswrapper[4919]: I0930 20:46:19.164631 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5"] Sep 30 20:46:19 crc kubenswrapper[4919]: I0930 20:46:19.279095 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" event={"ID":"4d98ac92-d78b-4df7-82bd-430f274f9ee0","Type":"ContainerStarted","Data":"0900e0a32502dca0c16fc016cfeee38724f77e2053457634a34fc10d88b69df2"} Sep 30 20:46:20 crc kubenswrapper[4919]: I0930 20:46:20.290768 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" event={"ID":"4d98ac92-d78b-4df7-82bd-430f274f9ee0","Type":"ContainerStarted","Data":"0c355dc47537724ac44d605ecad3d1cbfbdc9c210e5e62a7f647eb3ef71dd807"} Sep 30 20:46:20 crc kubenswrapper[4919]: I0930 20:46:20.313445 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" podStartSLOduration=1.764191541 podStartE2EDuration="2.313426478s" podCreationTimestamp="2025-09-30 20:46:18 +0000 UTC" firstStartedPulling="2025-09-30 20:46:19.172164104 +0000 UTC m=+1964.288197251" lastFinishedPulling="2025-09-30 20:46:19.721399041 +0000 UTC m=+1964.837432188" observedRunningTime="2025-09-30 20:46:20.307887507 +0000 UTC m=+1965.423920664" watchObservedRunningTime="2025-09-30 20:46:20.313426478 +0000 UTC m=+1965.429459615" Sep 30 20:46:26 crc kubenswrapper[4919]: I0930 20:46:26.062107 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:46:26 crc kubenswrapper[4919]: I0930 20:46:26.062938 4919 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:46:30 crc kubenswrapper[4919]: I0930 20:46:30.414340 4919 generic.go:334] "Generic (PLEG): container finished" podID="4d98ac92-d78b-4df7-82bd-430f274f9ee0" containerID="0c355dc47537724ac44d605ecad3d1cbfbdc9c210e5e62a7f647eb3ef71dd807" exitCode=0 Sep 30 20:46:30 crc kubenswrapper[4919]: I0930 20:46:30.414578 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" event={"ID":"4d98ac92-d78b-4df7-82bd-430f274f9ee0","Type":"ContainerDied","Data":"0c355dc47537724ac44d605ecad3d1cbfbdc9c210e5e62a7f647eb3ef71dd807"} Sep 30 20:46:31 crc kubenswrapper[4919]: I0930 20:46:31.840006 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:31 crc kubenswrapper[4919]: I0930 20:46:31.911342 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km8kt\" (UniqueName: \"kubernetes.io/projected/4d98ac92-d78b-4df7-82bd-430f274f9ee0-kube-api-access-km8kt\") pod \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " Sep 30 20:46:31 crc kubenswrapper[4919]: I0930 20:46:31.911403 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-inventory\") pod \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " Sep 30 20:46:31 crc kubenswrapper[4919]: I0930 20:46:31.911548 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-ssh-key\") pod \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\" (UID: \"4d98ac92-d78b-4df7-82bd-430f274f9ee0\") " Sep 30 20:46:31 crc kubenswrapper[4919]: I0930 20:46:31.917397 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d98ac92-d78b-4df7-82bd-430f274f9ee0-kube-api-access-km8kt" (OuterVolumeSpecName: "kube-api-access-km8kt") pod "4d98ac92-d78b-4df7-82bd-430f274f9ee0" (UID: "4d98ac92-d78b-4df7-82bd-430f274f9ee0"). InnerVolumeSpecName "kube-api-access-km8kt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:46:31 crc kubenswrapper[4919]: I0930 20:46:31.944701 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-inventory" (OuterVolumeSpecName: "inventory") pod "4d98ac92-d78b-4df7-82bd-430f274f9ee0" (UID: "4d98ac92-d78b-4df7-82bd-430f274f9ee0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:46:31 crc kubenswrapper[4919]: I0930 20:46:31.951243 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4d98ac92-d78b-4df7-82bd-430f274f9ee0" (UID: "4d98ac92-d78b-4df7-82bd-430f274f9ee0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.013794 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.013840 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km8kt\" (UniqueName: \"kubernetes.io/projected/4d98ac92-d78b-4df7-82bd-430f274f9ee0-kube-api-access-km8kt\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.013855 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4d98ac92-d78b-4df7-82bd-430f274f9ee0-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.447681 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" event={"ID":"4d98ac92-d78b-4df7-82bd-430f274f9ee0","Type":"ContainerDied","Data":"0900e0a32502dca0c16fc016cfeee38724f77e2053457634a34fc10d88b69df2"} Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.448248 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0900e0a32502dca0c16fc016cfeee38724f77e2053457634a34fc10d88b69df2" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.447814 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.622059 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"] Sep 30 20:46:32 crc kubenswrapper[4919]: E0930 20:46:32.622484 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d98ac92-d78b-4df7-82bd-430f274f9ee0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.622503 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d98ac92-d78b-4df7-82bd-430f274f9ee0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.622720 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d98ac92-d78b-4df7-82bd-430f274f9ee0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.623496 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.625914 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.626067 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.626095 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.626445 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.626605 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.626720 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.626803 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.626845 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.626928 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 
crc kubenswrapper[4919]: I0930 20:46:32.626965 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627016 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627032 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627093 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627111 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627126 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627241 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt9dw\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-kube-api-access-lt9dw\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627336 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627380 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627399 4919 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627461 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.627614 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.630263 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.641918 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"] Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729052 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729096 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729138 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729173 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 
20:46:32.729234 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729277 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729301 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729319 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729361 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729385 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729408 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729449 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID:
\"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729466 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.729489 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt9dw\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-kube-api-access-lt9dw\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.733180 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.734297 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.734822 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.734840 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.735023 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.735531 4919 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.736019 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.736386 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.738077 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.738425 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.738485 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.738566 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"
Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.741243 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") "
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.749560 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt9dw\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-kube-api-access-lt9dw\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-khgcj\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:32 crc kubenswrapper[4919]: I0930 20:46:32.945735 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:46:33 crc kubenswrapper[4919]: I0930 20:46:33.589890 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj"] Sep 30 20:46:34 crc kubenswrapper[4919]: I0930 20:46:34.473440 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" event={"ID":"5f109611-d866-4b48-be9e-2b8296544fcb","Type":"ContainerStarted","Data":"1c812ed6159ab200584573ef01b9fa1f147fedf131f44ecce4ab463980c77b61"} Sep 30 20:46:34 crc kubenswrapper[4919]: I0930 20:46:34.473723 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" event={"ID":"5f109611-d866-4b48-be9e-2b8296544fcb","Type":"ContainerStarted","Data":"609422d50db68e0fd3ac26726d73d127b74dbf56a22b2a5544e58ce8986fa8ca"} Sep 30 20:46:34 crc kubenswrapper[4919]: I0930 20:46:34.492284 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" podStartSLOduration=2.026402656 podStartE2EDuration="2.492265879s" podCreationTimestamp="2025-09-30 20:46:32 +0000 UTC" firstStartedPulling="2025-09-30 20:46:33.581727801 +0000 UTC m=+1978.697760968" lastFinishedPulling="2025-09-30 20:46:34.047591064 +0000 UTC m=+1979.163624191" observedRunningTime="2025-09-30 20:46:34.489109076 +0000 UTC m=+1979.605142243" watchObservedRunningTime="2025-09-30 20:46:34.492265879 +0000 UTC m=+1979.608299006" Sep 30 20:46:56 crc kubenswrapper[4919]: I0930 20:46:56.062547 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:46:56 crc kubenswrapper[4919]: I0930 20:46:56.063047 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:47:19 crc kubenswrapper[4919]: I0930 20:47:19.032559 4919 generic.go:334] "Generic (PLEG): container finished" podID="5f109611-d866-4b48-be9e-2b8296544fcb" containerID="1c812ed6159ab200584573ef01b9fa1f147fedf131f44ecce4ab463980c77b61" exitCode=0 Sep 30 20:47:19 crc kubenswrapper[4919]: I0930 20:47:19.032632 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" 
event={"ID":"5f109611-d866-4b48-be9e-2b8296544fcb","Type":"ContainerDied","Data":"1c812ed6159ab200584573ef01b9fa1f147fedf131f44ecce4ab463980c77b61"} Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.505621 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654337 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-neutron-metadata-combined-ca-bundle\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654416 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-ovn-default-certs-0\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654444 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-inventory\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654527 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-nova-combined-ca-bundle\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654587 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt9dw\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-kube-api-access-lt9dw\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654643 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-repo-setup-combined-ca-bundle\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654693 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-libvirt-combined-ca-bundle\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654757 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ssh-key\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654810 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-bootstrap-combined-ca-bundle\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654841 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ovn-combined-ca-bundle\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654910 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654952 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.654994 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.655028 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-telemetry-combined-ca-bundle\") pod \"5f109611-d866-4b48-be9e-2b8296544fcb\" (UID: \"5f109611-d866-4b48-be9e-2b8296544fcb\") " Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.662669 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.663936 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.664419 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.664643 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.664787 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.666680 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.667299 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.667422 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.669177 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.675328 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). 
InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.685741 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.688852 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-kube-api-access-lt9dw" (OuterVolumeSpecName: "kube-api-access-lt9dw") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "kube-api-access-lt9dw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.702918 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-inventory" (OuterVolumeSpecName: "inventory") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.706043 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5f109611-d866-4b48-be9e-2b8296544fcb" (UID: "5f109611-d866-4b48-be9e-2b8296544fcb"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758480 4919 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758540 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758573 4919 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758601 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt9dw\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-kube-api-access-lt9dw\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758626 4919 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758654 4919 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758682 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758709 4919 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758733 4919 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758760 4919 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758817 4919 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758852 4919 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/5f109611-d866-4b48-be9e-2b8296544fcb-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758882 4919 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:20 crc kubenswrapper[4919]: I0930 20:47:20.758910 4919 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f109611-d866-4b48-be9e-2b8296544fcb-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.063855 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" event={"ID":"5f109611-d866-4b48-be9e-2b8296544fcb","Type":"ContainerDied","Data":"609422d50db68e0fd3ac26726d73d127b74dbf56a22b2a5544e58ce8986fa8ca"} Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.063888 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-khgcj" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.063921 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="609422d50db68e0fd3ac26726d73d127b74dbf56a22b2a5544e58ce8986fa8ca" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.207595 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk"] Sep 30 20:47:21 crc kubenswrapper[4919]: E0930 20:47:21.208078 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f109611-d866-4b48-be9e-2b8296544fcb" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.208106 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f109611-d866-4b48-be9e-2b8296544fcb" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.208433 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f109611-d866-4b48-be9e-2b8296544fcb" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.209206 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.212145 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.212761 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.213476 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.214305 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.218517 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.244985 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk"] Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.380473 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.380549 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.380576 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.380817 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzmh5\" (UniqueName: \"kubernetes.io/projected/169a1e1c-15ca-4930-942f-48ac6a92d964-kube-api-access-gzmh5\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.380935 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/169a1e1c-15ca-4930-942f-48ac6a92d964-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.482771 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.483074 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.483197 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.483376 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzmh5\" (UniqueName: \"kubernetes.io/projected/169a1e1c-15ca-4930-942f-48ac6a92d964-kube-api-access-gzmh5\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.483490 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/169a1e1c-15ca-4930-942f-48ac6a92d964-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.484556 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/169a1e1c-15ca-4930-942f-48ac6a92d964-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.488098 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.489130 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.494374 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.515334 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzmh5\" (UniqueName: \"kubernetes.io/projected/169a1e1c-15ca-4930-942f-48ac6a92d964-kube-api-access-gzmh5\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-q2khk\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:21 crc kubenswrapper[4919]: I0930 20:47:21.534915 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:47:22 crc kubenswrapper[4919]: I0930 20:47:22.066334 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk"] Sep 30 20:47:23 crc kubenswrapper[4919]: I0930 20:47:23.088718 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" event={"ID":"169a1e1c-15ca-4930-942f-48ac6a92d964","Type":"ContainerStarted","Data":"6adf7da25540b35d527b163dae0e9e2d7e8ff187cfa9146a23369f4303bffe22"} Sep 30 20:47:23 crc kubenswrapper[4919]: I0930 20:47:23.089150 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" event={"ID":"169a1e1c-15ca-4930-942f-48ac6a92d964","Type":"ContainerStarted","Data":"a65791cbc15705b519c81cb79fccff5a68dd4a2d7365363e693982024eed4cfd"} Sep 30 20:47:23 crc kubenswrapper[4919]: I0930 20:47:23.124835 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" podStartSLOduration=1.693547286 podStartE2EDuration="2.124806068s" podCreationTimestamp="2025-09-30 20:47:21 +0000 UTC" firstStartedPulling="2025-09-30 20:47:22.081988579 +0000 UTC m=+2027.198021716" lastFinishedPulling="2025-09-30 20:47:22.513247361 +0000 UTC m=+2027.629280498" observedRunningTime="2025-09-30 20:47:23.11459176 +0000 UTC m=+2028.230624887" watchObservedRunningTime="2025-09-30 20:47:23.124806068 +0000 UTC m=+2028.240839225" Sep 30 20:47:26 crc kubenswrapper[4919]: I0930 20:47:26.062634 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:47:26 crc kubenswrapper[4919]: I0930 20:47:26.063054 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:47:26 crc kubenswrapper[4919]: I0930 20:47:26.063138 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:47:26 crc kubenswrapper[4919]: I0930 20:47:26.064421 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9d3002085d98ad4d2cc90f8e71e2652fb9ade38e472b41de06b7d6dc5a2524c9"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will 
be restarted" Sep 30 20:47:26 crc kubenswrapper[4919]: I0930 20:47:26.064529 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://9d3002085d98ad4d2cc90f8e71e2652fb9ade38e472b41de06b7d6dc5a2524c9" gracePeriod=600 Sep 30 20:47:27 crc kubenswrapper[4919]: I0930 20:47:27.139036 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="9d3002085d98ad4d2cc90f8e71e2652fb9ade38e472b41de06b7d6dc5a2524c9" exitCode=0 Sep 30 20:47:27 crc kubenswrapper[4919]: I0930 20:47:27.139160 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"9d3002085d98ad4d2cc90f8e71e2652fb9ade38e472b41de06b7d6dc5a2524c9"} Sep 30 20:47:27 crc kubenswrapper[4919]: I0930 20:47:27.139730 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a"} Sep 30 20:47:27 crc kubenswrapper[4919]: I0930 20:47:27.139758 4919 scope.go:117] "RemoveContainer" containerID="54a3723fe40805e5a2561a29ab03facb9fbac6d6d47d9344944991f3b25902c3" Sep 30 20:47:57 crc kubenswrapper[4919]: I0930 20:47:57.727957 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cn76x"] Sep 30 20:47:57 crc kubenswrapper[4919]: I0930 20:47:57.735509 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:57 crc kubenswrapper[4919]: I0930 20:47:57.744199 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cn76x"] Sep 30 20:47:57 crc kubenswrapper[4919]: I0930 20:47:57.907410 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjrdb\" (UniqueName: \"kubernetes.io/projected/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-kube-api-access-wjrdb\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:57 crc kubenswrapper[4919]: I0930 20:47:57.907524 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-catalog-content\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:57 crc kubenswrapper[4919]: I0930 20:47:57.907563 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-utilities\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:58 crc kubenswrapper[4919]: I0930 20:47:58.009435 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-catalog-content\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:58 crc kubenswrapper[4919]: I0930 20:47:58.009532 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-utilities\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:58 crc kubenswrapper[4919]: I0930 20:47:58.009656 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjrdb\" (UniqueName: \"kubernetes.io/projected/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-kube-api-access-wjrdb\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:58 crc kubenswrapper[4919]: I0930 20:47:58.010297 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-utilities\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:58 crc kubenswrapper[4919]: I0930 20:47:58.010321 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-catalog-content\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:58 crc kubenswrapper[4919]: I0930 20:47:58.043375 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wjrdb\" (UniqueName: \"kubernetes.io/projected/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-kube-api-access-wjrdb\") pod \"redhat-operators-cn76x\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") " pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:58 crc kubenswrapper[4919]: I0930 20:47:58.062746 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:47:58 crc kubenswrapper[4919]: I0930 20:47:58.542916 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cn76x"] Sep 30 20:47:59 crc kubenswrapper[4919]: I0930 20:47:59.550196 4919 generic.go:334] "Generic (PLEG): container finished" podID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerID="e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715" exitCode=0 Sep 30 20:47:59 crc kubenswrapper[4919]: I0930 20:47:59.550283 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cn76x" event={"ID":"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2","Type":"ContainerDied","Data":"e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715"} Sep 30 20:47:59 crc kubenswrapper[4919]: I0930 20:47:59.550501 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cn76x" event={"ID":"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2","Type":"ContainerStarted","Data":"e5f7e5afd72df1d5b9ca34389f78dbdc89a016b8279b872b0df7a77e5ecfd59c"} Sep 30 20:48:00 crc kubenswrapper[4919]: I0930 20:48:00.573863 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cn76x" event={"ID":"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2","Type":"ContainerStarted","Data":"34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c"} Sep 30 20:48:01 crc kubenswrapper[4919]: I0930 20:48:01.588185 4919 generic.go:334] "Generic (PLEG): container finished" podID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerID="34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c" exitCode=0 Sep 30 20:48:01 crc kubenswrapper[4919]: I0930 20:48:01.588270 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cn76x" event={"ID":"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2","Type":"ContainerDied","Data":"34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c"} Sep 30 20:48:02 crc kubenswrapper[4919]: I0930 20:48:02.606267 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cn76x" event={"ID":"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2","Type":"ContainerStarted","Data":"e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0"} Sep 30 20:48:02 crc kubenswrapper[4919]: I0930 20:48:02.639569 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cn76x" podStartSLOduration=3.118145622 podStartE2EDuration="5.639543835s" podCreationTimestamp="2025-09-30 20:47:57 +0000 UTC" firstStartedPulling="2025-09-30 20:47:59.55331379 +0000 UTC m=+2064.669346957" lastFinishedPulling="2025-09-30 20:48:02.074712013 +0000 UTC m=+2067.190745170" observedRunningTime="2025-09-30 20:48:02.625705831 +0000 UTC m=+2067.741739028" watchObservedRunningTime="2025-09-30 20:48:02.639543835 +0000 UTC m=+2067.755576992" Sep 30 20:48:08 crc kubenswrapper[4919]: I0930 20:48:08.063132 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cn76x" Sep 
30 20:48:08 crc kubenswrapper[4919]: I0930 20:48:08.063773 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cn76x"
Sep 30 20:48:08 crc kubenswrapper[4919]: I0930 20:48:08.152777 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cn76x"
Sep 30 20:48:08 crc kubenswrapper[4919]: I0930 20:48:08.750674 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cn76x"
Sep 30 20:48:08 crc kubenswrapper[4919]: I0930 20:48:08.821273 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cn76x"]
Sep 30 20:48:10 crc kubenswrapper[4919]: I0930 20:48:10.692389 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cn76x" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerName="registry-server" containerID="cri-o://e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0" gracePeriod=2
Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.141473 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cn76x"
Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.294018 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-catalog-content\") pod \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") "
Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.294301 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wjrdb\" (UniqueName: \"kubernetes.io/projected/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-kube-api-access-wjrdb\") pod \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") "
Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.294356 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-utilities\") pod \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\" (UID: \"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2\") "
Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.296336 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-utilities" (OuterVolumeSpecName: "utilities") pod "25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" (UID: "25f67d58-33a7-4d4f-b5e4-38e0901ad4a2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.318628 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-kube-api-access-wjrdb" (OuterVolumeSpecName: "kube-api-access-wjrdb") pod "25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" (UID: "25f67d58-33a7-4d4f-b5e4-38e0901ad4a2"). InnerVolumeSpecName "kube-api-access-wjrdb".
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.393815 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" (UID: "25f67d58-33a7-4d4f-b5e4-38e0901ad4a2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.396850 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wjrdb\" (UniqueName: \"kubernetes.io/projected/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-kube-api-access-wjrdb\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.396997 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.397080 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.713412 4919 generic.go:334] "Generic (PLEG): container finished" podID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerID="e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0" exitCode=0 Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.713495 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cn76x" event={"ID":"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2","Type":"ContainerDied","Data":"e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0"} Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.713900 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cn76x" event={"ID":"25f67d58-33a7-4d4f-b5e4-38e0901ad4a2","Type":"ContainerDied","Data":"e5f7e5afd72df1d5b9ca34389f78dbdc89a016b8279b872b0df7a77e5ecfd59c"} Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.713938 4919 scope.go:117] "RemoveContainer" containerID="e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.713546 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cn76x" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.756257 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cn76x"] Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.768100 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cn76x"] Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.770145 4919 scope.go:117] "RemoveContainer" containerID="34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.805816 4919 scope.go:117] "RemoveContainer" containerID="e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.858574 4919 scope.go:117] "RemoveContainer" containerID="e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0" Sep 30 20:48:11 crc kubenswrapper[4919]: E0930 20:48:11.859262 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0\": container with ID starting with e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0 not found: ID does not exist" containerID="e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.859394 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0"} err="failed to get container status \"e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0\": rpc error: code = NotFound desc = could not find container \"e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0\": container with ID starting with e34953b2efb574ca4517947d0b34aeee40aad9d8cbea052051fa084e068bf4c0 not found: ID does not exist" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.859512 4919 scope.go:117] "RemoveContainer" containerID="34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c" Sep 30 20:48:11 crc kubenswrapper[4919]: E0930 20:48:11.859926 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c\": container with ID starting with 34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c not found: ID does not exist" containerID="34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.859959 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c"} err="failed to get container status \"34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c\": rpc error: code = NotFound desc = could not find container \"34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c\": container with ID starting with 34136fb702bf1eae0761623f8898ad7f6d23b7078594fdfd2eb65f4ed541c81c not found: ID does not exist" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.859980 4919 scope.go:117] "RemoveContainer" containerID="e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715" Sep 30 20:48:11 crc kubenswrapper[4919]: E0930 20:48:11.860321 4919 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715\": container with ID starting with e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715 not found: ID does not exist" containerID="e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715" Sep 30 20:48:11 crc kubenswrapper[4919]: I0930 20:48:11.860433 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715"} err="failed to get container status \"e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715\": rpc error: code = NotFound desc = could not find container \"e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715\": container with ID starting with e680a5d06755c2e5a20e9af83ee1f053a98a58144494d72a7c7ea04073f8a715 not found: ID does not exist" Sep 30 20:48:13 crc kubenswrapper[4919]: I0930 20:48:13.656538 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" path="/var/lib/kubelet/pods/25f67d58-33a7-4d4f-b5e4-38e0901ad4a2/volumes" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.910634 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r7f5r"] Sep 30 20:48:36 crc kubenswrapper[4919]: E0930 20:48:36.911515 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerName="registry-server" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.911528 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerName="registry-server" Sep 30 20:48:36 crc kubenswrapper[4919]: E0930 20:48:36.911567 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerName="extract-content" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.911574 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerName="extract-content" Sep 30 20:48:36 crc kubenswrapper[4919]: E0930 20:48:36.911588 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerName="extract-utilities" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.911595 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerName="extract-utilities" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.911757 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="25f67d58-33a7-4d4f-b5e4-38e0901ad4a2" containerName="registry-server" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.913312 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.928947 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r7f5r"] Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.983701 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-catalog-content\") pod \"redhat-marketplace-r7f5r\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.984045 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-utilities\") pod \"redhat-marketplace-r7f5r\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:36 crc kubenswrapper[4919]: I0930 20:48:36.984113 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv886\" (UniqueName: \"kubernetes.io/projected/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-kube-api-access-kv886\") pod \"redhat-marketplace-r7f5r\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.000273 4919 generic.go:334] "Generic (PLEG): container finished" podID="169a1e1c-15ca-4930-942f-48ac6a92d964" containerID="6adf7da25540b35d527b163dae0e9e2d7e8ff187cfa9146a23369f4303bffe22" exitCode=0 Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.000312 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" event={"ID":"169a1e1c-15ca-4930-942f-48ac6a92d964","Type":"ContainerDied","Data":"6adf7da25540b35d527b163dae0e9e2d7e8ff187cfa9146a23369f4303bffe22"} Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.086242 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-catalog-content\") pod \"redhat-marketplace-r7f5r\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.086568 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-utilities\") pod \"redhat-marketplace-r7f5r\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.086630 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv886\" (UniqueName: \"kubernetes.io/projected/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-kube-api-access-kv886\") pod \"redhat-marketplace-r7f5r\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.086874 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-catalog-content\") pod \"redhat-marketplace-r7f5r\" (UID: 
\"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.087387 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-utilities\") pod \"redhat-marketplace-r7f5r\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.112632 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv886\" (UniqueName: \"kubernetes.io/projected/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-kube-api-access-kv886\") pod \"redhat-marketplace-r7f5r\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") " pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.229079 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:37 crc kubenswrapper[4919]: I0930 20:48:37.738172 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r7f5r"] Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.013188 4919 generic.go:334] "Generic (PLEG): container finished" podID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerID="787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a" exitCode=0 Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.013241 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7f5r" event={"ID":"abfe8ce7-2d1b-4060-972a-46d24c46b6d8","Type":"ContainerDied","Data":"787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a"} Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.013622 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7f5r" event={"ID":"abfe8ce7-2d1b-4060-972a-46d24c46b6d8","Type":"ContainerStarted","Data":"774af7859948b2a600cd92a82e004a8f960f153021663e6b89ec966494e1c1e5"} Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.395372 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.413255 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/169a1e1c-15ca-4930-942f-48ac6a92d964-ovncontroller-config-0\") pod \"169a1e1c-15ca-4930-942f-48ac6a92d964\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.413503 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ovn-combined-ca-bundle\") pod \"169a1e1c-15ca-4930-942f-48ac6a92d964\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.413576 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzmh5\" (UniqueName: \"kubernetes.io/projected/169a1e1c-15ca-4930-942f-48ac6a92d964-kube-api-access-gzmh5\") pod \"169a1e1c-15ca-4930-942f-48ac6a92d964\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.413613 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-inventory\") pod \"169a1e1c-15ca-4930-942f-48ac6a92d964\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.413773 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ssh-key\") pod \"169a1e1c-15ca-4930-942f-48ac6a92d964\" (UID: \"169a1e1c-15ca-4930-942f-48ac6a92d964\") " Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.423629 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "169a1e1c-15ca-4930-942f-48ac6a92d964" (UID: "169a1e1c-15ca-4930-942f-48ac6a92d964"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.423648 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169a1e1c-15ca-4930-942f-48ac6a92d964-kube-api-access-gzmh5" (OuterVolumeSpecName: "kube-api-access-gzmh5") pod "169a1e1c-15ca-4930-942f-48ac6a92d964" (UID: "169a1e1c-15ca-4930-942f-48ac6a92d964"). InnerVolumeSpecName "kube-api-access-gzmh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.451362 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-inventory" (OuterVolumeSpecName: "inventory") pod "169a1e1c-15ca-4930-942f-48ac6a92d964" (UID: "169a1e1c-15ca-4930-942f-48ac6a92d964"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.453769 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/169a1e1c-15ca-4930-942f-48ac6a92d964-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "169a1e1c-15ca-4930-942f-48ac6a92d964" (UID: "169a1e1c-15ca-4930-942f-48ac6a92d964"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.456672 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "169a1e1c-15ca-4930-942f-48ac6a92d964" (UID: "169a1e1c-15ca-4930-942f-48ac6a92d964"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.517078 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.517131 4919 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/169a1e1c-15ca-4930-942f-48ac6a92d964-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.517148 4919 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.517160 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzmh5\" (UniqueName: \"kubernetes.io/projected/169a1e1c-15ca-4930-942f-48ac6a92d964-kube-api-access-gzmh5\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:38 crc kubenswrapper[4919]: I0930 20:48:38.517172 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169a1e1c-15ca-4930-942f-48ac6a92d964-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.028466 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" event={"ID":"169a1e1c-15ca-4930-942f-48ac6a92d964","Type":"ContainerDied","Data":"a65791cbc15705b519c81cb79fccff5a68dd4a2d7365363e693982024eed4cfd"} Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.028527 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a65791cbc15705b519c81cb79fccff5a68dd4a2d7365363e693982024eed4cfd" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.028580 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-q2khk" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.177866 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp"] Sep 30 20:48:39 crc kubenswrapper[4919]: E0930 20:48:39.178589 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169a1e1c-15ca-4930-942f-48ac6a92d964" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.178623 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="169a1e1c-15ca-4930-942f-48ac6a92d964" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.179024 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="169a1e1c-15ca-4930-942f-48ac6a92d964" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.180847 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.184324 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.184583 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.184614 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.184890 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.187719 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.192590 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.194258 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp"] Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.230201 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.230260 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.230433 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-x52nj\" (UniqueName: \"kubernetes.io/projected/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-kube-api-access-x52nj\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.230471 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.230492 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.230528 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.332697 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.332828 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.332857 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.332979 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x52nj\" (UniqueName: \"kubernetes.io/projected/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-kube-api-access-x52nj\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.333001 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.333022 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.337076 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.338138 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.339803 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.340169 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.342055 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.355527 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x52nj\" (UniqueName: \"kubernetes.io/projected/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-kube-api-access-x52nj\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:39 crc kubenswrapper[4919]: I0930 20:48:39.508352 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:48:40 crc kubenswrapper[4919]: I0930 20:48:40.044415 4919 generic.go:334] "Generic (PLEG): container finished" podID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerID="a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4" exitCode=0 Sep 30 20:48:40 crc kubenswrapper[4919]: I0930 20:48:40.044466 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7f5r" event={"ID":"abfe8ce7-2d1b-4060-972a-46d24c46b6d8","Type":"ContainerDied","Data":"a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4"} Sep 30 20:48:40 crc kubenswrapper[4919]: I0930 20:48:40.099989 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp"] Sep 30 20:48:40 crc kubenswrapper[4919]: W0930 20:48:40.103344 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64b3c647_b4dd_4f5f_9ddc_001dd913c43c.slice/crio-ad82df6b6b88c9351f367e988680a4daa607a40be1f8892721c3bb53c4e76d04 WatchSource:0}: Error finding container ad82df6b6b88c9351f367e988680a4daa607a40be1f8892721c3bb53c4e76d04: Status 404 returned error can't find the container with id ad82df6b6b88c9351f367e988680a4daa607a40be1f8892721c3bb53c4e76d04 Sep 30 20:48:41 crc kubenswrapper[4919]: I0930 20:48:41.057608 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7f5r" event={"ID":"abfe8ce7-2d1b-4060-972a-46d24c46b6d8","Type":"ContainerStarted","Data":"3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16"} Sep 30 20:48:41 crc kubenswrapper[4919]: I0930 20:48:41.059351 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" event={"ID":"64b3c647-b4dd-4f5f-9ddc-001dd913c43c","Type":"ContainerStarted","Data":"47f5b5437d29c6b3edac998f1c1c6521e2b66c70681127ac49a64ee0295c6734"} Sep 30 20:48:41 crc kubenswrapper[4919]: I0930 20:48:41.059401 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" event={"ID":"64b3c647-b4dd-4f5f-9ddc-001dd913c43c","Type":"ContainerStarted","Data":"ad82df6b6b88c9351f367e988680a4daa607a40be1f8892721c3bb53c4e76d04"} Sep 30 20:48:41 crc kubenswrapper[4919]: I0930 20:48:41.090349 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r7f5r" podStartSLOduration=2.509952511 podStartE2EDuration="5.090328316s" podCreationTimestamp="2025-09-30 20:48:36 +0000 UTC" firstStartedPulling="2025-09-30 20:48:38.016331798 +0000 UTC m=+2103.132364935" lastFinishedPulling="2025-09-30 20:48:40.596707603 +0000 UTC m=+2105.712740740" observedRunningTime="2025-09-30 20:48:41.083342892 +0000 UTC m=+2106.199376029" watchObservedRunningTime="2025-09-30 20:48:41.090328316 +0000 UTC m=+2106.206361443" Sep 30 20:48:41 crc kubenswrapper[4919]: I0930 20:48:41.110031 
Sep 30 20:48:47 crc kubenswrapper[4919]: I0930 20:48:47.229589 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r7f5r"
Sep 30 20:48:47 crc kubenswrapper[4919]: I0930 20:48:47.230181 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r7f5r"
Sep 30 20:48:47 crc kubenswrapper[4919]: I0930 20:48:47.290343 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r7f5r"
Sep 30 20:48:48 crc kubenswrapper[4919]: I0930 20:48:48.185842 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r7f5r"
Sep 30 20:48:48 crc kubenswrapper[4919]: I0930 20:48:48.229936 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r7f5r"]
Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.153746 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-r7f5r" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerName="registry-server" containerID="cri-o://3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16" gracePeriod=2
Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.577847 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r7f5r"
Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.787694 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv886\" (UniqueName: \"kubernetes.io/projected/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-kube-api-access-kv886\") pod \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") "
Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.788068 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-catalog-content\") pod \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") "
Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.788143 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-utilities\") pod \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\" (UID: \"abfe8ce7-2d1b-4060-972a-46d24c46b6d8\") "
Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.791450 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-utilities" (OuterVolumeSpecName: "utilities") pod "abfe8ce7-2d1b-4060-972a-46d24c46b6d8" (UID: "abfe8ce7-2d1b-4060-972a-46d24c46b6d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.793924 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-kube-api-access-kv886" (OuterVolumeSpecName: "kube-api-access-kv886") pod "abfe8ce7-2d1b-4060-972a-46d24c46b6d8" (UID: "abfe8ce7-2d1b-4060-972a-46d24c46b6d8"). InnerVolumeSpecName "kube-api-access-kv886". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.802313 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "abfe8ce7-2d1b-4060-972a-46d24c46b6d8" (UID: "abfe8ce7-2d1b-4060-972a-46d24c46b6d8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.890948 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.890976 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:50 crc kubenswrapper[4919]: I0930 20:48:50.890986 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv886\" (UniqueName: \"kubernetes.io/projected/abfe8ce7-2d1b-4060-972a-46d24c46b6d8-kube-api-access-kv886\") on node \"crc\" DevicePath \"\"" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.168488 4919 generic.go:334] "Generic (PLEG): container finished" podID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerID="3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16" exitCode=0 Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.168547 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7f5r" event={"ID":"abfe8ce7-2d1b-4060-972a-46d24c46b6d8","Type":"ContainerDied","Data":"3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16"} Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.168584 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7f5r" event={"ID":"abfe8ce7-2d1b-4060-972a-46d24c46b6d8","Type":"ContainerDied","Data":"774af7859948b2a600cd92a82e004a8f960f153021663e6b89ec966494e1c1e5"} Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.168614 4919 scope.go:117] "RemoveContainer" containerID="3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.169805 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r7f5r" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.194684 4919 scope.go:117] "RemoveContainer" containerID="a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.217423 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r7f5r"] Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.227250 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-r7f5r"] Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.230774 4919 scope.go:117] "RemoveContainer" containerID="787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.266947 4919 scope.go:117] "RemoveContainer" containerID="3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16" Sep 30 20:48:51 crc kubenswrapper[4919]: E0930 20:48:51.267576 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16\": container with ID starting with 3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16 not found: ID does not exist" containerID="3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.267622 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16"} err="failed to get container status \"3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16\": rpc error: code = NotFound desc = could not find container \"3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16\": container with ID starting with 3848bde942c1d13bfee74d89c2836d74b23f84a326eaf32c21081774e9a65e16 not found: ID does not exist" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.267651 4919 scope.go:117] "RemoveContainer" containerID="a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4" Sep 30 20:48:51 crc kubenswrapper[4919]: E0930 20:48:51.268319 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4\": container with ID starting with a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4 not found: ID does not exist" containerID="a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.268374 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4"} err="failed to get container status \"a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4\": rpc error: code = NotFound desc = could not find container \"a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4\": container with ID starting with a3b442d055cc678ec30494cee93c8756383731e805768403020eb863a6f41be4 not found: ID does not exist" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.268417 4919 scope.go:117] "RemoveContainer" containerID="787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a" Sep 30 20:48:51 crc kubenswrapper[4919]: E0930 20:48:51.268995 4919 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a\": container with ID starting with 787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a not found: ID does not exist" containerID="787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.269022 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a"} err="failed to get container status \"787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a\": rpc error: code = NotFound desc = could not find container \"787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a\": container with ID starting with 787d6c8b0632fa099db55e9e7d50c41a97f33214be44c7debd29d2b4069cc64a not found: ID does not exist" Sep 30 20:48:51 crc kubenswrapper[4919]: I0930 20:48:51.642517 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" path="/var/lib/kubelet/pods/abfe8ce7-2d1b-4060-972a-46d24c46b6d8/volumes" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.488619 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c5d8q"] Sep 30 20:49:22 crc kubenswrapper[4919]: E0930 20:49:22.489853 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerName="extract-utilities" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.489877 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerName="extract-utilities" Sep 30 20:49:22 crc kubenswrapper[4919]: E0930 20:49:22.489920 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerName="extract-content" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.489934 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerName="extract-content" Sep 30 20:49:22 crc kubenswrapper[4919]: E0930 20:49:22.489952 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerName="registry-server" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.489965 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerName="registry-server" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.490333 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="abfe8ce7-2d1b-4060-972a-46d24c46b6d8" containerName="registry-server" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.492770 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.499950 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c5d8q"] Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.590445 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8q5z\" (UniqueName: \"kubernetes.io/projected/455581bc-2151-467c-b96d-224a9466c85a-kube-api-access-k8q5z\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.591103 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-catalog-content\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.591271 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-utilities\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.693514 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-catalog-content\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.693580 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-utilities\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.693666 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8q5z\" (UniqueName: \"kubernetes.io/projected/455581bc-2151-467c-b96d-224a9466c85a-kube-api-access-k8q5z\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.695423 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-utilities\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.695447 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-catalog-content\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.725576 4919 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-k8q5z\" (UniqueName: \"kubernetes.io/projected/455581bc-2151-467c-b96d-224a9466c85a-kube-api-access-k8q5z\") pod \"certified-operators-c5d8q\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:22 crc kubenswrapper[4919]: I0930 20:49:22.836182 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:23 crc kubenswrapper[4919]: I0930 20:49:23.359810 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c5d8q"] Sep 30 20:49:23 crc kubenswrapper[4919]: I0930 20:49:23.495935 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5d8q" event={"ID":"455581bc-2151-467c-b96d-224a9466c85a","Type":"ContainerStarted","Data":"b0d272a7732f750950853600f0ba2624a15d541cd49628499be60abfd7888a37"} Sep 30 20:49:24 crc kubenswrapper[4919]: I0930 20:49:24.509815 4919 generic.go:334] "Generic (PLEG): container finished" podID="455581bc-2151-467c-b96d-224a9466c85a" containerID="955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f" exitCode=0 Sep 30 20:49:24 crc kubenswrapper[4919]: I0930 20:49:24.509912 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5d8q" event={"ID":"455581bc-2151-467c-b96d-224a9466c85a","Type":"ContainerDied","Data":"955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f"} Sep 30 20:49:24 crc kubenswrapper[4919]: I0930 20:49:24.513173 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:49:26 crc kubenswrapper[4919]: I0930 20:49:26.062397 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:49:26 crc kubenswrapper[4919]: I0930 20:49:26.062720 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:49:26 crc kubenswrapper[4919]: I0930 20:49:26.545477 4919 generic.go:334] "Generic (PLEG): container finished" podID="455581bc-2151-467c-b96d-224a9466c85a" containerID="397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03" exitCode=0 Sep 30 20:49:26 crc kubenswrapper[4919]: I0930 20:49:26.545929 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5d8q" event={"ID":"455581bc-2151-467c-b96d-224a9466c85a","Type":"ContainerDied","Data":"397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03"} Sep 30 20:49:27 crc kubenswrapper[4919]: I0930 20:49:27.559830 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5d8q" event={"ID":"455581bc-2151-467c-b96d-224a9466c85a","Type":"ContainerStarted","Data":"72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e"} Sep 30 20:49:27 crc kubenswrapper[4919]: I0930 20:49:27.583684 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-c5d8q" podStartSLOduration=2.928956631 podStartE2EDuration="5.583661773s" podCreationTimestamp="2025-09-30 20:49:22 +0000 UTC" firstStartedPulling="2025-09-30 20:49:24.512775401 +0000 UTC m=+2149.628808558" lastFinishedPulling="2025-09-30 20:49:27.167480573 +0000 UTC m=+2152.283513700" observedRunningTime="2025-09-30 20:49:27.581306955 +0000 UTC m=+2152.697340092" watchObservedRunningTime="2025-09-30 20:49:27.583661773 +0000 UTC m=+2152.699694900" Sep 30 20:49:30 crc kubenswrapper[4919]: I0930 20:49:30.587261 4919 generic.go:334] "Generic (PLEG): container finished" podID="64b3c647-b4dd-4f5f-9ddc-001dd913c43c" containerID="47f5b5437d29c6b3edac998f1c1c6521e2b66c70681127ac49a64ee0295c6734" exitCode=0 Sep 30 20:49:30 crc kubenswrapper[4919]: I0930 20:49:30.587356 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" event={"ID":"64b3c647-b4dd-4f5f-9ddc-001dd913c43c","Type":"ContainerDied","Data":"47f5b5437d29c6b3edac998f1c1c6521e2b66c70681127ac49a64ee0295c6734"} Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.020239 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.050084 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-ssh-key\") pod \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.050139 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-inventory\") pod \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.050204 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.050346 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-nova-metadata-neutron-config-0\") pod \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.050422 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x52nj\" (UniqueName: \"kubernetes.io/projected/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-kube-api-access-x52nj\") pod \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.050575 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-metadata-combined-ca-bundle\") pod \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\" (UID: \"64b3c647-b4dd-4f5f-9ddc-001dd913c43c\") " Sep 30 
Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.057362 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-kube-api-access-x52nj" (OuterVolumeSpecName: "kube-api-access-x52nj") pod "64b3c647-b4dd-4f5f-9ddc-001dd913c43c" (UID: "64b3c647-b4dd-4f5f-9ddc-001dd913c43c"). InnerVolumeSpecName "kube-api-access-x52nj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.078749 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "64b3c647-b4dd-4f5f-9ddc-001dd913c43c" (UID: "64b3c647-b4dd-4f5f-9ddc-001dd913c43c"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.081557 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "64b3c647-b4dd-4f5f-9ddc-001dd913c43c" (UID: "64b3c647-b4dd-4f5f-9ddc-001dd913c43c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.084717 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "64b3c647-b4dd-4f5f-9ddc-001dd913c43c" (UID: "64b3c647-b4dd-4f5f-9ddc-001dd913c43c"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.091497 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-inventory" (OuterVolumeSpecName: "inventory") pod "64b3c647-b4dd-4f5f-9ddc-001dd913c43c" (UID: "64b3c647-b4dd-4f5f-9ddc-001dd913c43c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.153008 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.153043 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.153055 4919 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.153070 4919 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.153084 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x52nj\" (UniqueName: \"kubernetes.io/projected/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-kube-api-access-x52nj\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.153095 4919 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b3c647-b4dd-4f5f-9ddc-001dd913c43c-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.604420 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" event={"ID":"64b3c647-b4dd-4f5f-9ddc-001dd913c43c","Type":"ContainerDied","Data":"ad82df6b6b88c9351f367e988680a4daa607a40be1f8892721c3bb53c4e76d04"} Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.605194 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad82df6b6b88c9351f367e988680a4daa607a40be1f8892721c3bb53c4e76d04" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.604520 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.704910 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp"] Sep 30 20:49:32 crc kubenswrapper[4919]: E0930 20:49:32.705616 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b3c647-b4dd-4f5f-9ddc-001dd913c43c" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.705650 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b3c647-b4dd-4f5f-9ddc-001dd913c43c" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.705938 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b3c647-b4dd-4f5f-9ddc-001dd913c43c" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.706875 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.709371 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.709651 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.710028 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.710394 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.712739 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.714544 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp"] Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.761138 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zgwv\" (UniqueName: \"kubernetes.io/projected/dba87a6c-f6b5-4ef3-920a-fa94968c3602-kube-api-access-8zgwv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.761200 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.761266 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.761304 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.761424 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.836991 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.837148 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.863207 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.863394 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zgwv\" (UniqueName: \"kubernetes.io/projected/dba87a6c-f6b5-4ef3-920a-fa94968c3602-kube-api-access-8zgwv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.863431 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.863476 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.863509 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.868093 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.869482 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.869536 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: 
\"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.869641 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.880789 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zgwv\" (UniqueName: \"kubernetes.io/projected/dba87a6c-f6b5-4ef3-920a-fa94968c3602-kube-api-access-8zgwv\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:32 crc kubenswrapper[4919]: I0930 20:49:32.892594 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:33 crc kubenswrapper[4919]: I0930 20:49:33.039726 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:49:33 crc kubenswrapper[4919]: I0930 20:49:33.655015 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp"] Sep 30 20:49:33 crc kubenswrapper[4919]: I0930 20:49:33.678298 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:33 crc kubenswrapper[4919]: I0930 20:49:33.750342 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c5d8q"] Sep 30 20:49:34 crc kubenswrapper[4919]: I0930 20:49:34.634330 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" event={"ID":"dba87a6c-f6b5-4ef3-920a-fa94968c3602","Type":"ContainerStarted","Data":"94f379fe13f95b6e1214d851e45c0eee75ae7bf32a12ee196a2dd8be99985c3f"} Sep 30 20:49:34 crc kubenswrapper[4919]: I0930 20:49:34.634799 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" event={"ID":"dba87a6c-f6b5-4ef3-920a-fa94968c3602","Type":"ContainerStarted","Data":"b879f83cfc543c8e787934b980329c362036a0ff33a41f18e66e061b71915b1b"} Sep 30 20:49:34 crc kubenswrapper[4919]: I0930 20:49:34.668784 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" podStartSLOduration=2.122693866 podStartE2EDuration="2.668757315s" podCreationTimestamp="2025-09-30 20:49:32 +0000 UTC" firstStartedPulling="2025-09-30 20:49:33.668692479 +0000 UTC m=+2158.784725626" lastFinishedPulling="2025-09-30 20:49:34.214755918 +0000 UTC m=+2159.330789075" observedRunningTime="2025-09-30 20:49:34.662927586 +0000 UTC m=+2159.778960733" watchObservedRunningTime="2025-09-30 20:49:34.668757315 +0000 UTC m=+2159.784790462" Sep 30 20:49:35 crc kubenswrapper[4919]: I0930 20:49:35.667007 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c5d8q" podUID="455581bc-2151-467c-b96d-224a9466c85a" containerName="registry-server" 
containerID="cri-o://72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e" gracePeriod=2 Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.190786 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.235442 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8q5z\" (UniqueName: \"kubernetes.io/projected/455581bc-2151-467c-b96d-224a9466c85a-kube-api-access-k8q5z\") pod \"455581bc-2151-467c-b96d-224a9466c85a\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.235504 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-utilities\") pod \"455581bc-2151-467c-b96d-224a9466c85a\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.235612 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-catalog-content\") pod \"455581bc-2151-467c-b96d-224a9466c85a\" (UID: \"455581bc-2151-467c-b96d-224a9466c85a\") " Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.236764 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-utilities" (OuterVolumeSpecName: "utilities") pod "455581bc-2151-467c-b96d-224a9466c85a" (UID: "455581bc-2151-467c-b96d-224a9466c85a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.242409 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/455581bc-2151-467c-b96d-224a9466c85a-kube-api-access-k8q5z" (OuterVolumeSpecName: "kube-api-access-k8q5z") pod "455581bc-2151-467c-b96d-224a9466c85a" (UID: "455581bc-2151-467c-b96d-224a9466c85a"). InnerVolumeSpecName "kube-api-access-k8q5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.343372 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8q5z\" (UniqueName: \"kubernetes.io/projected/455581bc-2151-467c-b96d-224a9466c85a-kube-api-access-k8q5z\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.343407 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.678294 4919 generic.go:334] "Generic (PLEG): container finished" podID="455581bc-2151-467c-b96d-224a9466c85a" containerID="72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e" exitCode=0 Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.678364 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c5d8q" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.678429 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5d8q" event={"ID":"455581bc-2151-467c-b96d-224a9466c85a","Type":"ContainerDied","Data":"72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e"} Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.678885 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5d8q" event={"ID":"455581bc-2151-467c-b96d-224a9466c85a","Type":"ContainerDied","Data":"b0d272a7732f750950853600f0ba2624a15d541cd49628499be60abfd7888a37"} Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.678920 4919 scope.go:117] "RemoveContainer" containerID="72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.703833 4919 scope.go:117] "RemoveContainer" containerID="397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.736250 4919 scope.go:117] "RemoveContainer" containerID="955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.767963 4919 scope.go:117] "RemoveContainer" containerID="72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e" Sep 30 20:49:36 crc kubenswrapper[4919]: E0930 20:49:36.768469 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e\": container with ID starting with 72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e not found: ID does not exist" containerID="72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.768495 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e"} err="failed to get container status \"72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e\": rpc error: code = NotFound desc = could not find container \"72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e\": container with ID starting with 72f4a7cbc269293e92e0d87ace8d7abdebc707b41e643a2cac0f2141a2b05b0e not found: ID does not exist" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.768514 4919 scope.go:117] "RemoveContainer" containerID="397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03" Sep 30 20:49:36 crc kubenswrapper[4919]: E0930 20:49:36.768823 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03\": container with ID starting with 397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03 not found: ID does not exist" containerID="397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.768843 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03"} err="failed to get container status \"397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03\": rpc error: code = NotFound desc = could not find container 
\"397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03\": container with ID starting with 397c5dba1a43649e1040a6ce660e19000a3a75752444f7168000de8aa2572a03 not found: ID does not exist" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.768855 4919 scope.go:117] "RemoveContainer" containerID="955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f" Sep 30 20:49:36 crc kubenswrapper[4919]: E0930 20:49:36.769148 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f\": container with ID starting with 955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f not found: ID does not exist" containerID="955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.769166 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f"} err="failed to get container status \"955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f\": rpc error: code = NotFound desc = could not find container \"955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f\": container with ID starting with 955aba0a1c996959669a7eded4985214501eeb0d18847aecc522256d99e4cc4f not found: ID does not exist" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.911118 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "455581bc-2151-467c-b96d-224a9466c85a" (UID: "455581bc-2151-467c-b96d-224a9466c85a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:49:36 crc kubenswrapper[4919]: I0930 20:49:36.958043 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/455581bc-2151-467c-b96d-224a9466c85a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:49:37 crc kubenswrapper[4919]: I0930 20:49:37.026093 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c5d8q"] Sep 30 20:49:37 crc kubenswrapper[4919]: I0930 20:49:37.046907 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c5d8q"] Sep 30 20:49:37 crc kubenswrapper[4919]: I0930 20:49:37.655091 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="455581bc-2151-467c-b96d-224a9466c85a" path="/var/lib/kubelet/pods/455581bc-2151-467c-b96d-224a9466c85a/volumes" Sep 30 20:49:56 crc kubenswrapper[4919]: I0930 20:49:56.062162 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:49:56 crc kubenswrapper[4919]: I0930 20:49:56.062716 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.061933 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.062552 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.062608 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.063887 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.063946 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" gracePeriod=600 Sep 30 20:50:26 crc kubenswrapper[4919]: E0930 20:50:26.211570 4919 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.304552 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" exitCode=0 Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.304617 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a"} Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.304701 4919 scope.go:117] "RemoveContainer" containerID="9d3002085d98ad4d2cc90f8e71e2652fb9ade38e472b41de06b7d6dc5a2524c9" Sep 30 20:50:26 crc kubenswrapper[4919]: I0930 20:50:26.306683 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:50:26 crc kubenswrapper[4919]: E0930 20:50:26.307407 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:50:39 crc kubenswrapper[4919]: I0930 20:50:39.632925 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:50:39 crc kubenswrapper[4919]: E0930 20:50:39.634002 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:50:51 crc kubenswrapper[4919]: I0930 20:50:51.632380 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:50:51 crc kubenswrapper[4919]: E0930 20:50:51.633304 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:51:06 crc kubenswrapper[4919]: I0930 20:51:06.632818 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:51:06 crc kubenswrapper[4919]: E0930 20:51:06.633659 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:51:21 crc kubenswrapper[4919]: I0930 20:51:21.635265 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:51:21 crc kubenswrapper[4919]: E0930 20:51:21.637639 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:51:36 crc kubenswrapper[4919]: I0930 20:51:36.632552 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:51:36 crc kubenswrapper[4919]: E0930 20:51:36.633597 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:51:49 crc kubenswrapper[4919]: I0930 20:51:49.634144 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:51:49 crc kubenswrapper[4919]: E0930 20:51:49.635535 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:52:02 crc kubenswrapper[4919]: I0930 20:52:02.631854 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:52:02 crc kubenswrapper[4919]: E0930 20:52:02.632787 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:52:13 crc kubenswrapper[4919]: I0930 20:52:13.633142 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:52:13 crc kubenswrapper[4919]: E0930 20:52:13.634064 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:52:27 crc kubenswrapper[4919]: I0930 20:52:27.633143 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:52:27 crc kubenswrapper[4919]: E0930 20:52:27.634053 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:52:40 crc kubenswrapper[4919]: I0930 20:52:40.632759 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:52:40 crc kubenswrapper[4919]: E0930 20:52:40.633527 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:52:52 crc kubenswrapper[4919]: I0930 20:52:52.633160 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:52:52 crc kubenswrapper[4919]: E0930 20:52:52.634473 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:53:04 crc kubenswrapper[4919]: I0930 20:53:04.632638 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:53:04 crc kubenswrapper[4919]: E0930 20:53:04.633243 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:53:19 crc kubenswrapper[4919]: I0930 20:53:19.632819 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:53:19 crc kubenswrapper[4919]: E0930 20:53:19.633749 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:53:30 crc kubenswrapper[4919]: I0930 20:53:30.632434 4919 
scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:53:30 crc kubenswrapper[4919]: E0930 20:53:30.633435 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:53:45 crc kubenswrapper[4919]: I0930 20:53:45.640192 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:53:45 crc kubenswrapper[4919]: E0930 20:53:45.641244 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:53:54 crc kubenswrapper[4919]: I0930 20:53:54.498793 4919 generic.go:334] "Generic (PLEG): container finished" podID="dba87a6c-f6b5-4ef3-920a-fa94968c3602" containerID="94f379fe13f95b6e1214d851e45c0eee75ae7bf32a12ee196a2dd8be99985c3f" exitCode=0 Sep 30 20:53:54 crc kubenswrapper[4919]: I0930 20:53:54.498842 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" event={"ID":"dba87a6c-f6b5-4ef3-920a-fa94968c3602","Type":"ContainerDied","Data":"94f379fe13f95b6e1214d851e45c0eee75ae7bf32a12ee196a2dd8be99985c3f"} Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.012717 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.126916 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-ssh-key\") pod \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.126970 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-combined-ca-bundle\") pod \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.127116 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zgwv\" (UniqueName: \"kubernetes.io/projected/dba87a6c-f6b5-4ef3-920a-fa94968c3602-kube-api-access-8zgwv\") pod \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.127134 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-inventory\") pod \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.128076 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-secret-0\") pod \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\" (UID: \"dba87a6c-f6b5-4ef3-920a-fa94968c3602\") " Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.138622 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "dba87a6c-f6b5-4ef3-920a-fa94968c3602" (UID: "dba87a6c-f6b5-4ef3-920a-fa94968c3602"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.139170 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dba87a6c-f6b5-4ef3-920a-fa94968c3602-kube-api-access-8zgwv" (OuterVolumeSpecName: "kube-api-access-8zgwv") pod "dba87a6c-f6b5-4ef3-920a-fa94968c3602" (UID: "dba87a6c-f6b5-4ef3-920a-fa94968c3602"). InnerVolumeSpecName "kube-api-access-8zgwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.167772 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-inventory" (OuterVolumeSpecName: "inventory") pod "dba87a6c-f6b5-4ef3-920a-fa94968c3602" (UID: "dba87a6c-f6b5-4ef3-920a-fa94968c3602"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.168548 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "dba87a6c-f6b5-4ef3-920a-fa94968c3602" (UID: "dba87a6c-f6b5-4ef3-920a-fa94968c3602"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.173514 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "dba87a6c-f6b5-4ef3-920a-fa94968c3602" (UID: "dba87a6c-f6b5-4ef3-920a-fa94968c3602"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.231007 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zgwv\" (UniqueName: \"kubernetes.io/projected/dba87a6c-f6b5-4ef3-920a-fa94968c3602-kube-api-access-8zgwv\") on node \"crc\" DevicePath \"\"" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.231079 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.231105 4919 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.231123 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.231142 4919 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dba87a6c-f6b5-4ef3-920a-fa94968c3602-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.517589 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" event={"ID":"dba87a6c-f6b5-4ef3-920a-fa94968c3602","Type":"ContainerDied","Data":"b879f83cfc543c8e787934b980329c362036a0ff33a41f18e66e061b71915b1b"} Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.517635 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b879f83cfc543c8e787934b980329c362036a0ff33a41f18e66e061b71915b1b" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.517659 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.645399 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx"] Sep 30 20:53:56 crc kubenswrapper[4919]: E0930 20:53:56.645986 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="455581bc-2151-467c-b96d-224a9466c85a" containerName="registry-server" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.646003 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="455581bc-2151-467c-b96d-224a9466c85a" containerName="registry-server" Sep 30 20:53:56 crc kubenswrapper[4919]: E0930 20:53:56.646039 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dba87a6c-f6b5-4ef3-920a-fa94968c3602" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.646049 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="dba87a6c-f6b5-4ef3-920a-fa94968c3602" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 20:53:56 crc kubenswrapper[4919]: E0930 20:53:56.646067 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="455581bc-2151-467c-b96d-224a9466c85a" containerName="extract-utilities" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.646076 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="455581bc-2151-467c-b96d-224a9466c85a" containerName="extract-utilities" Sep 30 20:53:56 crc kubenswrapper[4919]: E0930 20:53:56.646087 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="455581bc-2151-467c-b96d-224a9466c85a" containerName="extract-content" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.646095 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="455581bc-2151-467c-b96d-224a9466c85a" containerName="extract-content" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.646367 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="dba87a6c-f6b5-4ef3-920a-fa94968c3602" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.646395 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="455581bc-2151-467c-b96d-224a9466c85a" containerName="registry-server" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.647252 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.649931 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.652128 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx"] Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.665512 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.665745 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.666417 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.666572 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.666758 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.666885 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.741206 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.742286 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.742365 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.742527 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.742629 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: 
\"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.742868 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.742938 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.742971 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.743113 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqb55\" (UniqueName: \"kubernetes.io/projected/394bf41c-6bf1-40f4-af0c-41ba74713e03-kube-api-access-mqb55\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845065 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845146 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845253 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845311 4919 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845344 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845434 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqb55\" (UniqueName: \"kubernetes.io/projected/394bf41c-6bf1-40f4-af0c-41ba74713e03-kube-api-access-mqb55\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845518 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845561 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.845599 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.847845 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.849473 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.849828 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.850183 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.850466 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.851642 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.854095 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.861781 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.864690 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqb55\" (UniqueName: \"kubernetes.io/projected/394bf41c-6bf1-40f4-af0c-41ba74713e03-kube-api-access-mqb55\") pod \"nova-edpm-deployment-openstack-edpm-ipam-j4ggx\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:56 crc kubenswrapper[4919]: I0930 20:53:56.997815 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:53:57 crc kubenswrapper[4919]: I0930 20:53:57.539305 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx"] Sep 30 20:53:58 crc kubenswrapper[4919]: I0930 20:53:58.538772 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" event={"ID":"394bf41c-6bf1-40f4-af0c-41ba74713e03","Type":"ContainerStarted","Data":"bd31eb265f11e4995d955bac51751d0441a49b20eeecd3c496d7373105d9b9f6"} Sep 30 20:53:58 crc kubenswrapper[4919]: I0930 20:53:58.539406 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" event={"ID":"394bf41c-6bf1-40f4-af0c-41ba74713e03","Type":"ContainerStarted","Data":"ea8ef408d46e9c1770f27efad627e3acafd0f7715cdd0183d26816f6e43a36a1"} Sep 30 20:53:58 crc kubenswrapper[4919]: I0930 20:53:58.560020 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" podStartSLOduration=2.0638155400000002 podStartE2EDuration="2.56000313s" podCreationTimestamp="2025-09-30 20:53:56 +0000 UTC" firstStartedPulling="2025-09-30 20:53:57.527607243 +0000 UTC m=+2422.643640380" lastFinishedPulling="2025-09-30 20:53:58.023794833 +0000 UTC m=+2423.139827970" observedRunningTime="2025-09-30 20:53:58.557395755 +0000 UTC m=+2423.673428912" watchObservedRunningTime="2025-09-30 20:53:58.56000313 +0000 UTC m=+2423.676036257" Sep 30 20:54:00 crc kubenswrapper[4919]: I0930 20:54:00.631891 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:54:00 crc kubenswrapper[4919]: E0930 20:54:00.632619 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:54:11 crc kubenswrapper[4919]: I0930 20:54:11.632286 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:54:11 crc kubenswrapper[4919]: E0930 20:54:11.633408 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:54:24 crc kubenswrapper[4919]: I0930 20:54:24.632029 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:54:24 crc kubenswrapper[4919]: E0930 20:54:24.632954 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" 
podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:54:35 crc kubenswrapper[4919]: I0930 20:54:35.640023 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:54:35 crc kubenswrapper[4919]: E0930 20:54:35.641128 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:54:46 crc kubenswrapper[4919]: I0930 20:54:46.633870 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:54:46 crc kubenswrapper[4919]: E0930 20:54:46.634673 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:54:59 crc kubenswrapper[4919]: I0930 20:54:59.632359 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:54:59 crc kubenswrapper[4919]: E0930 20:54:59.633403 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:55:11 crc kubenswrapper[4919]: I0930 20:55:11.632939 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:55:11 crc kubenswrapper[4919]: E0930 20:55:11.634207 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:55:24 crc kubenswrapper[4919]: I0930 20:55:24.632406 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:55:24 crc kubenswrapper[4919]: E0930 20:55:24.633557 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 20:55:38 crc kubenswrapper[4919]: I0930 20:55:38.632878 4919 scope.go:117] "RemoveContainer" 
containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:55:39 crc kubenswrapper[4919]: I0930 20:55:39.751327 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"e80e39ff8cf4c5d798c0a77b763a10876f1d8ee226789c307d8c40f2aedc19fa"} Sep 30 20:57:20 crc kubenswrapper[4919]: I0930 20:57:20.809450 4919 generic.go:334] "Generic (PLEG): container finished" podID="394bf41c-6bf1-40f4-af0c-41ba74713e03" containerID="bd31eb265f11e4995d955bac51751d0441a49b20eeecd3c496d7373105d9b9f6" exitCode=0 Sep 30 20:57:20 crc kubenswrapper[4919]: I0930 20:57:20.809904 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" event={"ID":"394bf41c-6bf1-40f4-af0c-41ba74713e03","Type":"ContainerDied","Data":"bd31eb265f11e4995d955bac51751d0441a49b20eeecd3c496d7373105d9b9f6"} Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.230848 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.366988 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-0\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.367505 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-extra-config-0\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.367645 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-1\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.367830 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-1\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.368363 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqb55\" (UniqueName: \"kubernetes.io/projected/394bf41c-6bf1-40f4-af0c-41ba74713e03-kube-api-access-mqb55\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.368546 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-0\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.368846 4919 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-ssh-key\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.369005 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-combined-ca-bundle\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.369179 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-inventory\") pod \"394bf41c-6bf1-40f4-af0c-41ba74713e03\" (UID: \"394bf41c-6bf1-40f4-af0c-41ba74713e03\") " Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.374417 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.375905 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/394bf41c-6bf1-40f4-af0c-41ba74713e03-kube-api-access-mqb55" (OuterVolumeSpecName: "kube-api-access-mqb55") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "kube-api-access-mqb55". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.401530 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.401966 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.403429 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-inventory" (OuterVolumeSpecName: "inventory") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.407737 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.408124 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.421611 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.426358 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "394bf41c-6bf1-40f4-af0c-41ba74713e03" (UID: "394bf41c-6bf1-40f4-af0c-41ba74713e03"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472762 4919 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472816 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472838 4919 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472860 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472877 4919 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472894 4919 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472911 4919 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472928 4919 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/394bf41c-6bf1-40f4-af0c-41ba74713e03-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.472945 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqb55\" (UniqueName: \"kubernetes.io/projected/394bf41c-6bf1-40f4-af0c-41ba74713e03-kube-api-access-mqb55\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.834911 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" event={"ID":"394bf41c-6bf1-40f4-af0c-41ba74713e03","Type":"ContainerDied","Data":"ea8ef408d46e9c1770f27efad627e3acafd0f7715cdd0183d26816f6e43a36a1"} Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.834951 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea8ef408d46e9c1770f27efad627e3acafd0f7715cdd0183d26816f6e43a36a1" Sep 30 20:57:22 crc kubenswrapper[4919]: I0930 20:57:22.834988 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-j4ggx" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.025459 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4"] Sep 30 20:57:23 crc kubenswrapper[4919]: E0930 20:57:23.026040 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="394bf41c-6bf1-40f4-af0c-41ba74713e03" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.026055 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="394bf41c-6bf1-40f4-af0c-41ba74713e03" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.026257 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="394bf41c-6bf1-40f4-af0c-41ba74713e03" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.026836 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.028982 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.029195 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.029378 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-hznj2" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.029728 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.030016 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.085053 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4"] Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.187837 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.187909 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.188150 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: 
\"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.188309 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.188371 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62fvf\" (UniqueName: \"kubernetes.io/projected/e4e968a1-eb51-4c2e-9672-ff0a6f050948-kube-api-access-62fvf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.188428 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.188538 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.290578 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.290660 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62fvf\" (UniqueName: \"kubernetes.io/projected/e4e968a1-eb51-4c2e-9672-ff0a6f050948-kube-api-access-62fvf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.290723 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.290804 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: 
\"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.290894 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.290987 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.291973 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.297733 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.297958 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.298506 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.300041 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.310883 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.316345 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.324560 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62fvf\" (UniqueName: \"kubernetes.io/projected/e4e968a1-eb51-4c2e-9672-ff0a6f050948-kube-api-access-62fvf\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-fnww4\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.341409 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.928527 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4"] Sep 30 20:57:23 crc kubenswrapper[4919]: I0930 20:57:23.944123 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 20:57:24 crc kubenswrapper[4919]: I0930 20:57:24.864830 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" event={"ID":"e4e968a1-eb51-4c2e-9672-ff0a6f050948","Type":"ContainerStarted","Data":"422483ac30a9fa2a191bef18fe7eca7dc8f77929e7af8fa2d7409bb19f1af7ef"} Sep 30 20:57:24 crc kubenswrapper[4919]: I0930 20:57:24.865456 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" event={"ID":"e4e968a1-eb51-4c2e-9672-ff0a6f050948","Type":"ContainerStarted","Data":"182abf408e15907f2c69b64e3272c925d8e8e346a2371e049b2d8c9d2991c70a"} Sep 30 20:57:24 crc kubenswrapper[4919]: I0930 20:57:24.925407 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" podStartSLOduration=1.2705650259999999 podStartE2EDuration="1.925387013s" podCreationTimestamp="2025-09-30 20:57:23 +0000 UTC" firstStartedPulling="2025-09-30 20:57:23.943857197 +0000 UTC m=+2629.059890314" lastFinishedPulling="2025-09-30 20:57:24.598679164 +0000 UTC m=+2629.714712301" observedRunningTime="2025-09-30 20:57:24.919780281 +0000 UTC m=+2630.035813438" watchObservedRunningTime="2025-09-30 20:57:24.925387013 +0000 UTC m=+2630.041420150" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.451909 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gxl52"] Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.455307 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.475616 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gxl52"] Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.557873 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jz62\" (UniqueName: \"kubernetes.io/projected/3fe5c25a-c1a6-4b4e-8da9-53adde194331-kube-api-access-5jz62\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.558353 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-catalog-content\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.558646 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-utilities\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.660433 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jz62\" (UniqueName: \"kubernetes.io/projected/3fe5c25a-c1a6-4b4e-8da9-53adde194331-kube-api-access-5jz62\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.660544 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-catalog-content\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.660614 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-utilities\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.661156 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-utilities\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.661415 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-catalog-content\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.680819 4919 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-5jz62\" (UniqueName: \"kubernetes.io/projected/3fe5c25a-c1a6-4b4e-8da9-53adde194331-kube-api-access-5jz62\") pod \"community-operators-gxl52\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:45 crc kubenswrapper[4919]: I0930 20:57:45.793493 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:46 crc kubenswrapper[4919]: I0930 20:57:46.383676 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gxl52"] Sep 30 20:57:46 crc kubenswrapper[4919]: W0930 20:57:46.391053 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fe5c25a_c1a6_4b4e_8da9_53adde194331.slice/crio-75efb8c9883c5ab5dc9c9f5b1a24fc80c26fe59f40a7c8d72e2fa23a5be626a6 WatchSource:0}: Error finding container 75efb8c9883c5ab5dc9c9f5b1a24fc80c26fe59f40a7c8d72e2fa23a5be626a6: Status 404 returned error can't find the container with id 75efb8c9883c5ab5dc9c9f5b1a24fc80c26fe59f40a7c8d72e2fa23a5be626a6 Sep 30 20:57:47 crc kubenswrapper[4919]: I0930 20:57:47.134404 4919 generic.go:334] "Generic (PLEG): container finished" podID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerID="bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e" exitCode=0 Sep 30 20:57:47 crc kubenswrapper[4919]: I0930 20:57:47.134474 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gxl52" event={"ID":"3fe5c25a-c1a6-4b4e-8da9-53adde194331","Type":"ContainerDied","Data":"bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e"} Sep 30 20:57:47 crc kubenswrapper[4919]: I0930 20:57:47.134515 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gxl52" event={"ID":"3fe5c25a-c1a6-4b4e-8da9-53adde194331","Type":"ContainerStarted","Data":"75efb8c9883c5ab5dc9c9f5b1a24fc80c26fe59f40a7c8d72e2fa23a5be626a6"} Sep 30 20:57:49 crc kubenswrapper[4919]: I0930 20:57:49.160900 4919 generic.go:334] "Generic (PLEG): container finished" podID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerID="9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438" exitCode=0 Sep 30 20:57:49 crc kubenswrapper[4919]: I0930 20:57:49.160994 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gxl52" event={"ID":"3fe5c25a-c1a6-4b4e-8da9-53adde194331","Type":"ContainerDied","Data":"9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438"} Sep 30 20:57:52 crc kubenswrapper[4919]: I0930 20:57:52.197712 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gxl52" event={"ID":"3fe5c25a-c1a6-4b4e-8da9-53adde194331","Type":"ContainerStarted","Data":"28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed"} Sep 30 20:57:52 crc kubenswrapper[4919]: I0930 20:57:52.220406 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gxl52" podStartSLOduration=3.530550081 podStartE2EDuration="7.220388924s" podCreationTimestamp="2025-09-30 20:57:45 +0000 UTC" firstStartedPulling="2025-09-30 20:57:47.139189871 +0000 UTC m=+2652.255223028" lastFinishedPulling="2025-09-30 20:57:50.829028734 +0000 UTC m=+2655.945061871" observedRunningTime="2025-09-30 20:57:52.214315808 +0000 UTC 
m=+2657.330348955" watchObservedRunningTime="2025-09-30 20:57:52.220388924 +0000 UTC m=+2657.336422051" Sep 30 20:57:55 crc kubenswrapper[4919]: I0930 20:57:55.795413 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:55 crc kubenswrapper[4919]: I0930 20:57:55.795831 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:55 crc kubenswrapper[4919]: I0930 20:57:55.847398 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:56 crc kubenswrapper[4919]: I0930 20:57:56.062615 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:57:56 crc kubenswrapper[4919]: I0930 20:57:56.062679 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:57:56 crc kubenswrapper[4919]: I0930 20:57:56.293058 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:56 crc kubenswrapper[4919]: I0930 20:57:56.355309 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gxl52"] Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.254899 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gxl52" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerName="registry-server" containerID="cri-o://28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed" gracePeriod=2 Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.772258 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.862688 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-catalog-content\") pod \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.862814 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jz62\" (UniqueName: \"kubernetes.io/projected/3fe5c25a-c1a6-4b4e-8da9-53adde194331-kube-api-access-5jz62\") pod \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.862859 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-utilities\") pod \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\" (UID: \"3fe5c25a-c1a6-4b4e-8da9-53adde194331\") " Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.864281 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-utilities" (OuterVolumeSpecName: "utilities") pod "3fe5c25a-c1a6-4b4e-8da9-53adde194331" (UID: "3fe5c25a-c1a6-4b4e-8da9-53adde194331"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.877307 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fe5c25a-c1a6-4b4e-8da9-53adde194331-kube-api-access-5jz62" (OuterVolumeSpecName: "kube-api-access-5jz62") pod "3fe5c25a-c1a6-4b4e-8da9-53adde194331" (UID: "3fe5c25a-c1a6-4b4e-8da9-53adde194331"). InnerVolumeSpecName "kube-api-access-5jz62". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.965728 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jz62\" (UniqueName: \"kubernetes.io/projected/3fe5c25a-c1a6-4b4e-8da9-53adde194331-kube-api-access-5jz62\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:58 crc kubenswrapper[4919]: I0930 20:57:58.965765 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.173374 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3fe5c25a-c1a6-4b4e-8da9-53adde194331" (UID: "3fe5c25a-c1a6-4b4e-8da9-53adde194331"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.263912 4919 generic.go:334] "Generic (PLEG): container finished" podID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerID="28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed" exitCode=0 Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.263953 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gxl52" event={"ID":"3fe5c25a-c1a6-4b4e-8da9-53adde194331","Type":"ContainerDied","Data":"28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed"} Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.263991 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gxl52" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.264013 4919 scope.go:117] "RemoveContainer" containerID="28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.263999 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gxl52" event={"ID":"3fe5c25a-c1a6-4b4e-8da9-53adde194331","Type":"ContainerDied","Data":"75efb8c9883c5ab5dc9c9f5b1a24fc80c26fe59f40a7c8d72e2fa23a5be626a6"} Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.275253 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3fe5c25a-c1a6-4b4e-8da9-53adde194331-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.288776 4919 scope.go:117] "RemoveContainer" containerID="9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.295900 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gxl52"] Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.306169 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gxl52"] Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.326633 4919 scope.go:117] "RemoveContainer" containerID="bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.366115 4919 scope.go:117] "RemoveContainer" containerID="28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed" Sep 30 20:57:59 crc kubenswrapper[4919]: E0930 20:57:59.366811 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed\": container with ID starting with 28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed not found: ID does not exist" containerID="28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.370348 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed"} err="failed to get container status \"28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed\": rpc error: code = NotFound desc = could not find container \"28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed\": container with ID starting with 28ec225718489b6da3f87e7588b96c031a7f8400e3534998192ccb4fc6ce60ed not found: ID does not exist" Sep 30 
20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.370412 4919 scope.go:117] "RemoveContainer" containerID="9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438" Sep 30 20:57:59 crc kubenswrapper[4919]: E0930 20:57:59.371184 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438\": container with ID starting with 9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438 not found: ID does not exist" containerID="9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.371256 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438"} err="failed to get container status \"9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438\": rpc error: code = NotFound desc = could not find container \"9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438\": container with ID starting with 9d6348ceb3b821d19abfae3ee490c0170891aa55aeeb4ffcad85e50abd605438 not found: ID does not exist" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.371288 4919 scope.go:117] "RemoveContainer" containerID="bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e" Sep 30 20:57:59 crc kubenswrapper[4919]: E0930 20:57:59.372595 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e\": container with ID starting with bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e not found: ID does not exist" containerID="bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.372630 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e"} err="failed to get container status \"bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e\": rpc error: code = NotFound desc = could not find container \"bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e\": container with ID starting with bd24bcf063b3b9a3d57e158bec33ee867115358d243ad56fabadabc7c37afe8e not found: ID does not exist" Sep 30 20:57:59 crc kubenswrapper[4919]: I0930 20:57:59.643005 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" path="/var/lib/kubelet/pods/3fe5c25a-c1a6-4b4e-8da9-53adde194331/volumes" Sep 30 20:58:26 crc kubenswrapper[4919]: I0930 20:58:26.062551 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:58:26 crc kubenswrapper[4919]: I0930 20:58:26.063099 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.330272 4919 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-czs77"] Sep 30 20:58:35 crc kubenswrapper[4919]: E0930 20:58:35.331527 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerName="extract-content" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.331549 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerName="extract-content" Sep 30 20:58:35 crc kubenswrapper[4919]: E0930 20:58:35.331604 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerName="extract-utilities" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.331615 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerName="extract-utilities" Sep 30 20:58:35 crc kubenswrapper[4919]: E0930 20:58:35.331634 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerName="registry-server" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.331645 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerName="registry-server" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.331949 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fe5c25a-c1a6-4b4e-8da9-53adde194331" containerName="registry-server" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.334127 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.348205 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-czs77"] Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.384658 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mkk4\" (UniqueName: \"kubernetes.io/projected/7b924b33-204f-415e-84ab-4d6762538399-kube-api-access-4mkk4\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.384845 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-utilities\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.384949 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-catalog-content\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.486592 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-utilities\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.486919 4919 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-catalog-content\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.487072 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mkk4\" (UniqueName: \"kubernetes.io/projected/7b924b33-204f-415e-84ab-4d6762538399-kube-api-access-4mkk4\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.487149 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-utilities\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.487500 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-catalog-content\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.512788 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mkk4\" (UniqueName: \"kubernetes.io/projected/7b924b33-204f-415e-84ab-4d6762538399-kube-api-access-4mkk4\") pod \"redhat-operators-czs77\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:35 crc kubenswrapper[4919]: I0930 20:58:35.659031 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:36 crc kubenswrapper[4919]: W0930 20:58:36.200459 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b924b33_204f_415e_84ab_4d6762538399.slice/crio-b553afd5ca69bfb909c1e695ce1f23bf636a48391f769f67c126ff5b36e3032c WatchSource:0}: Error finding container b553afd5ca69bfb909c1e695ce1f23bf636a48391f769f67c126ff5b36e3032c: Status 404 returned error can't find the container with id b553afd5ca69bfb909c1e695ce1f23bf636a48391f769f67c126ff5b36e3032c Sep 30 20:58:36 crc kubenswrapper[4919]: I0930 20:58:36.202568 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-czs77"] Sep 30 20:58:36 crc kubenswrapper[4919]: I0930 20:58:36.646853 4919 generic.go:334] "Generic (PLEG): container finished" podID="7b924b33-204f-415e-84ab-4d6762538399" containerID="6cd03ef2b970b8e632d19f4a7988f753345de22f90ef2ef3bff0eea5f34a0570" exitCode=0 Sep 30 20:58:36 crc kubenswrapper[4919]: I0930 20:58:36.646918 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-czs77" event={"ID":"7b924b33-204f-415e-84ab-4d6762538399","Type":"ContainerDied","Data":"6cd03ef2b970b8e632d19f4a7988f753345de22f90ef2ef3bff0eea5f34a0570"} Sep 30 20:58:36 crc kubenswrapper[4919]: I0930 20:58:36.647711 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-czs77" event={"ID":"7b924b33-204f-415e-84ab-4d6762538399","Type":"ContainerStarted","Data":"b553afd5ca69bfb909c1e695ce1f23bf636a48391f769f67c126ff5b36e3032c"} Sep 30 20:58:39 crc kubenswrapper[4919]: I0930 20:58:39.685777 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-czs77" event={"ID":"7b924b33-204f-415e-84ab-4d6762538399","Type":"ContainerStarted","Data":"40d506b5fa00ac502b47867c8bcbea72662a013e7dc40d40307634cfe0a5f795"} Sep 30 20:58:40 crc kubenswrapper[4919]: I0930 20:58:40.720065 4919 generic.go:334] "Generic (PLEG): container finished" podID="7b924b33-204f-415e-84ab-4d6762538399" containerID="40d506b5fa00ac502b47867c8bcbea72662a013e7dc40d40307634cfe0a5f795" exitCode=0 Sep 30 20:58:40 crc kubenswrapper[4919]: I0930 20:58:40.720110 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-czs77" event={"ID":"7b924b33-204f-415e-84ab-4d6762538399","Type":"ContainerDied","Data":"40d506b5fa00ac502b47867c8bcbea72662a013e7dc40d40307634cfe0a5f795"} Sep 30 20:58:41 crc kubenswrapper[4919]: I0930 20:58:41.733205 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-czs77" event={"ID":"7b924b33-204f-415e-84ab-4d6762538399","Type":"ContainerStarted","Data":"9aabf3368c9473c211c0b51da4767012bc6bf60a5546053d3dccc03d02730859"} Sep 30 20:58:41 crc kubenswrapper[4919]: I0930 20:58:41.751952 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-czs77" podStartSLOduration=2.741751416 podStartE2EDuration="6.751934494s" podCreationTimestamp="2025-09-30 20:58:35 +0000 UTC" firstStartedPulling="2025-09-30 20:58:36.649308901 +0000 UTC m=+2701.765342028" lastFinishedPulling="2025-09-30 20:58:40.659491979 +0000 UTC m=+2705.775525106" observedRunningTime="2025-09-30 20:58:41.750959515 +0000 UTC m=+2706.866992662" watchObservedRunningTime="2025-09-30 20:58:41.751934494 +0000 UTC m=+2706.867967621" Sep 30 20:58:45 crc 
kubenswrapper[4919]: I0930 20:58:45.659157 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:45 crc kubenswrapper[4919]: I0930 20:58:45.660320 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:45 crc kubenswrapper[4919]: I0930 20:58:45.712569 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:45 crc kubenswrapper[4919]: I0930 20:58:45.821562 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:46 crc kubenswrapper[4919]: I0930 20:58:46.524154 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-czs77"] Sep 30 20:58:47 crc kubenswrapper[4919]: I0930 20:58:47.795294 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-czs77" podUID="7b924b33-204f-415e-84ab-4d6762538399" containerName="registry-server" containerID="cri-o://9aabf3368c9473c211c0b51da4767012bc6bf60a5546053d3dccc03d02730859" gracePeriod=2 Sep 30 20:58:48 crc kubenswrapper[4919]: I0930 20:58:48.807445 4919 generic.go:334] "Generic (PLEG): container finished" podID="7b924b33-204f-415e-84ab-4d6762538399" containerID="9aabf3368c9473c211c0b51da4767012bc6bf60a5546053d3dccc03d02730859" exitCode=0 Sep 30 20:58:48 crc kubenswrapper[4919]: I0930 20:58:48.807502 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-czs77" event={"ID":"7b924b33-204f-415e-84ab-4d6762538399","Type":"ContainerDied","Data":"9aabf3368c9473c211c0b51da4767012bc6bf60a5546053d3dccc03d02730859"} Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.698735 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.791951 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-utilities\") pod \"7b924b33-204f-415e-84ab-4d6762538399\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.792164 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-catalog-content\") pod \"7b924b33-204f-415e-84ab-4d6762538399\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.792319 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mkk4\" (UniqueName: \"kubernetes.io/projected/7b924b33-204f-415e-84ab-4d6762538399-kube-api-access-4mkk4\") pod \"7b924b33-204f-415e-84ab-4d6762538399\" (UID: \"7b924b33-204f-415e-84ab-4d6762538399\") " Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.793202 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-utilities" (OuterVolumeSpecName: "utilities") pod "7b924b33-204f-415e-84ab-4d6762538399" (UID: "7b924b33-204f-415e-84ab-4d6762538399"). InnerVolumeSpecName "utilities". 
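The pod_startup_latency_tracker entry above reports two figures for redhat-operators-czs77: podStartSLOduration, a bare float in seconds that excludes image-pull time, and podStartE2EDuration, a Go duration string from creation to observed running. The difference, 6.751934494 − 2.741751416 ≈ 4.010 s, is exactly the pull window bounded by firstStartedPulling (20:58:36.649) and lastFinishedPulling (20:58:40.659). A small sketch of extracting both values; the field names come from the entry, the parsing itself is an assumption about this rendering:

```go
package main

import (
	"fmt"
	"regexp"
	"strconv"
	"time"
)

var (
	reSLO = regexp.MustCompile(`podStartSLOduration=([0-9.]+)`)
	reE2E = regexp.MustCompile(`podStartE2EDuration="([^"]+)"`)
)

func main() {
	line := `... podStartSLOduration=2.741751416 podStartE2EDuration="6.751934494s" ...`
	slo, _ := strconv.ParseFloat(reSLO.FindStringSubmatch(line)[1], 64)
	e2e, _ := time.ParseDuration(reE2E.FindStringSubmatch(line)[1])
	// The gap is the image pull window (firstStartedPulling -> lastFinishedPulling).
	fmt.Printf("SLO %.3fs, e2e %s, pull overhead ~%.3fs\n", slo, e2e, e2e.Seconds()-slo)
}
```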
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.798747 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b924b33-204f-415e-84ab-4d6762538399-kube-api-access-4mkk4" (OuterVolumeSpecName: "kube-api-access-4mkk4") pod "7b924b33-204f-415e-84ab-4d6762538399" (UID: "7b924b33-204f-415e-84ab-4d6762538399"). InnerVolumeSpecName "kube-api-access-4mkk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.818842 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-czs77" event={"ID":"7b924b33-204f-415e-84ab-4d6762538399","Type":"ContainerDied","Data":"b553afd5ca69bfb909c1e695ce1f23bf636a48391f769f67c126ff5b36e3032c"} Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.818891 4919 scope.go:117] "RemoveContainer" containerID="9aabf3368c9473c211c0b51da4767012bc6bf60a5546053d3dccc03d02730859" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.818928 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-czs77" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.866356 4919 scope.go:117] "RemoveContainer" containerID="40d506b5fa00ac502b47867c8bcbea72662a013e7dc40d40307634cfe0a5f795" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.890765 4919 scope.go:117] "RemoveContainer" containerID="6cd03ef2b970b8e632d19f4a7988f753345de22f90ef2ef3bff0eea5f34a0570" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.894999 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mkk4\" (UniqueName: \"kubernetes.io/projected/7b924b33-204f-415e-84ab-4d6762538399-kube-api-access-4mkk4\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.895022 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.909667 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b924b33-204f-415e-84ab-4d6762538399" (UID: "7b924b33-204f-415e-84ab-4d6762538399"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:58:49 crc kubenswrapper[4919]: I0930 20:58:49.996416 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b924b33-204f-415e-84ab-4d6762538399-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:58:50 crc kubenswrapper[4919]: I0930 20:58:50.163372 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-czs77"] Sep 30 20:58:50 crc kubenswrapper[4919]: I0930 20:58:50.173956 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-czs77"] Sep 30 20:58:51 crc kubenswrapper[4919]: I0930 20:58:51.646105 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b924b33-204f-415e-84ab-4d6762538399" path="/var/lib/kubelet/pods/7b924b33-204f-415e-84ab-4d6762538399/volumes" Sep 30 20:58:56 crc kubenswrapper[4919]: I0930 20:58:56.061638 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 20:58:56 crc kubenswrapper[4919]: I0930 20:58:56.062310 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 20:58:56 crc kubenswrapper[4919]: I0930 20:58:56.062381 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 20:58:56 crc kubenswrapper[4919]: I0930 20:58:56.063337 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e80e39ff8cf4c5d798c0a77b763a10876f1d8ee226789c307d8c40f2aedc19fa"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 20:58:56 crc kubenswrapper[4919]: I0930 20:58:56.063442 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://e80e39ff8cf4c5d798c0a77b763a10876f1d8ee226789c307d8c40f2aedc19fa" gracePeriod=600 Sep 30 20:58:56 crc kubenswrapper[4919]: E0930 20:58:56.424865 4919 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb371a63_6d82_453e_930e_656710b97f10.slice/crio-conmon-e80e39ff8cf4c5d798c0a77b763a10876f1d8ee226789c307d8c40f2aedc19fa.scope\": RecentStats: unable to find data in memory cache]" Sep 30 20:58:56 crc kubenswrapper[4919]: I0930 20:58:56.920351 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="e80e39ff8cf4c5d798c0a77b763a10876f1d8ee226789c307d8c40f2aedc19fa" exitCode=0 Sep 30 20:58:56 crc kubenswrapper[4919]: I0930 20:58:56.920386 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"e80e39ff8cf4c5d798c0a77b763a10876f1d8ee226789c307d8c40f2aedc19fa"} Sep 30 20:58:56 crc kubenswrapper[4919]: I0930 20:58:56.920764 4919 scope.go:117] "RemoveContainer" containerID="4d0e82ac8c6bffcc37fcd1317e499fc3c540f02f4d0fe5f3b8358b0e2a728f4a" Sep 30 20:58:57 crc kubenswrapper[4919]: I0930 20:58:57.944903 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5"} Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.765333 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-74qdp"] Sep 30 20:59:39 crc kubenswrapper[4919]: E0930 20:59:39.766049 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b924b33-204f-415e-84ab-4d6762538399" containerName="registry-server" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.766062 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b924b33-204f-415e-84ab-4d6762538399" containerName="registry-server" Sep 30 20:59:39 crc kubenswrapper[4919]: E0930 20:59:39.766092 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b924b33-204f-415e-84ab-4d6762538399" containerName="extract-content" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.766097 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b924b33-204f-415e-84ab-4d6762538399" containerName="extract-content" Sep 30 20:59:39 crc kubenswrapper[4919]: E0930 20:59:39.766113 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b924b33-204f-415e-84ab-4d6762538399" containerName="extract-utilities" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.766119 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b924b33-204f-415e-84ab-4d6762538399" containerName="extract-utilities" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.766354 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b924b33-204f-415e-84ab-4d6762538399" containerName="registry-server" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.767662 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.790354 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-74qdp"] Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.840268 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-utilities\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.840480 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-catalog-content\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.840528 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xxff\" (UniqueName: \"kubernetes.io/projected/372a8d96-f5ef-4ba9-8112-efebf08eef57-kube-api-access-6xxff\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.943788 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-utilities\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.946668 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-catalog-content\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.944893 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-utilities\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.946722 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xxff\" (UniqueName: \"kubernetes.io/projected/372a8d96-f5ef-4ba9-8112-efebf08eef57-kube-api-access-6xxff\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.947293 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-catalog-content\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:39 crc kubenswrapper[4919]: I0930 20:59:39.973417 4919 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6xxff\" (UniqueName: \"kubernetes.io/projected/372a8d96-f5ef-4ba9-8112-efebf08eef57-kube-api-access-6xxff\") pod \"redhat-marketplace-74qdp\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:40 crc kubenswrapper[4919]: I0930 20:59:40.088365 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:40 crc kubenswrapper[4919]: I0930 20:59:40.579798 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-74qdp"] Sep 30 20:59:41 crc kubenswrapper[4919]: I0930 20:59:41.393273 4919 generic.go:334] "Generic (PLEG): container finished" podID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerID="65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e" exitCode=0 Sep 30 20:59:41 crc kubenswrapper[4919]: I0930 20:59:41.393335 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74qdp" event={"ID":"372a8d96-f5ef-4ba9-8112-efebf08eef57","Type":"ContainerDied","Data":"65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e"} Sep 30 20:59:41 crc kubenswrapper[4919]: I0930 20:59:41.393369 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74qdp" event={"ID":"372a8d96-f5ef-4ba9-8112-efebf08eef57","Type":"ContainerStarted","Data":"a31151427f379f9dd53a4dad9e4fffa9641ca831487a1a3df9dafa1e6e3c4cd7"} Sep 30 20:59:44 crc kubenswrapper[4919]: I0930 20:59:44.426686 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74qdp" event={"ID":"372a8d96-f5ef-4ba9-8112-efebf08eef57","Type":"ContainerStarted","Data":"1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8"} Sep 30 20:59:45 crc kubenswrapper[4919]: I0930 20:59:45.440873 4919 generic.go:334] "Generic (PLEG): container finished" podID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerID="1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8" exitCode=0 Sep 30 20:59:45 crc kubenswrapper[4919]: I0930 20:59:45.440952 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74qdp" event={"ID":"372a8d96-f5ef-4ba9-8112-efebf08eef57","Type":"ContainerDied","Data":"1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8"} Sep 30 20:59:47 crc kubenswrapper[4919]: I0930 20:59:47.464752 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74qdp" event={"ID":"372a8d96-f5ef-4ba9-8112-efebf08eef57","Type":"ContainerStarted","Data":"a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46"} Sep 30 20:59:47 crc kubenswrapper[4919]: I0930 20:59:47.491152 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-74qdp" podStartSLOduration=3.199071009 podStartE2EDuration="8.491133687s" podCreationTimestamp="2025-09-30 20:59:39 +0000 UTC" firstStartedPulling="2025-09-30 20:59:41.395475738 +0000 UTC m=+2766.511508865" lastFinishedPulling="2025-09-30 20:59:46.687538396 +0000 UTC m=+2771.803571543" observedRunningTime="2025-09-30 20:59:47.487818231 +0000 UTC m=+2772.603851358" watchObservedRunningTime="2025-09-30 20:59:47.491133687 +0000 UTC m=+2772.607166804" Sep 30 20:59:50 crc kubenswrapper[4919]: I0930 20:59:50.088570 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:50 crc kubenswrapper[4919]: I0930 20:59:50.090028 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:50 crc kubenswrapper[4919]: I0930 20:59:50.140263 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:51 crc kubenswrapper[4919]: I0930 20:59:51.563271 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:51 crc kubenswrapper[4919]: I0930 20:59:51.658443 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-74qdp"] Sep 30 20:59:53 crc kubenswrapper[4919]: I0930 20:59:53.524848 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-74qdp" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerName="registry-server" containerID="cri-o://a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46" gracePeriod=2 Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.536953 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.537246 4919 generic.go:334] "Generic (PLEG): container finished" podID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerID="a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46" exitCode=0 Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.537293 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74qdp" event={"ID":"372a8d96-f5ef-4ba9-8112-efebf08eef57","Type":"ContainerDied","Data":"a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46"} Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.537324 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-74qdp" event={"ID":"372a8d96-f5ef-4ba9-8112-efebf08eef57","Type":"ContainerDied","Data":"a31151427f379f9dd53a4dad9e4fffa9641ca831487a1a3df9dafa1e6e3c4cd7"} Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.537345 4919 scope.go:117] "RemoveContainer" containerID="a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.575206 4919 scope.go:117] "RemoveContainer" containerID="1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.603696 4919 scope.go:117] "RemoveContainer" containerID="65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.645378 4919 scope.go:117] "RemoveContainer" containerID="a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46" Sep 30 20:59:54 crc kubenswrapper[4919]: E0930 20:59:54.645737 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46\": container with ID starting with a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46 not found: ID does not exist" containerID="a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.645771 4919 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46"} err="failed to get container status \"a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46\": rpc error: code = NotFound desc = could not find container \"a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46\": container with ID starting with a42c332d23369898ce8d1a0f19e7ff8e9719a030834016ca0cd1cf764170ba46 not found: ID does not exist" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.645792 4919 scope.go:117] "RemoveContainer" containerID="1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8" Sep 30 20:59:54 crc kubenswrapper[4919]: E0930 20:59:54.646030 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8\": container with ID starting with 1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8 not found: ID does not exist" containerID="1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.646058 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8"} err="failed to get container status \"1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8\": rpc error: code = NotFound desc = could not find container \"1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8\": container with ID starting with 1eb9378fe4fc2268a9f213ee04d5ace0b9e520a671cadab9e9753062ed806ac8 not found: ID does not exist" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.646076 4919 scope.go:117] "RemoveContainer" containerID="65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e" Sep 30 20:59:54 crc kubenswrapper[4919]: E0930 20:59:54.646542 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e\": container with ID starting with 65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e not found: ID does not exist" containerID="65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.646581 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e"} err="failed to get container status \"65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e\": rpc error: code = NotFound desc = could not find container \"65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e\": container with ID starting with 65d7f5a9645940cec76d6f687b0df213e3d0902d2fd137e32310416dafe48d8e not found: ID does not exist" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.736964 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-utilities\") pod \"372a8d96-f5ef-4ba9-8112-efebf08eef57\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.737179 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-catalog-content\") pod \"372a8d96-f5ef-4ba9-8112-efebf08eef57\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.737262 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xxff\" (UniqueName: \"kubernetes.io/projected/372a8d96-f5ef-4ba9-8112-efebf08eef57-kube-api-access-6xxff\") pod \"372a8d96-f5ef-4ba9-8112-efebf08eef57\" (UID: \"372a8d96-f5ef-4ba9-8112-efebf08eef57\") " Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.739778 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-utilities" (OuterVolumeSpecName: "utilities") pod "372a8d96-f5ef-4ba9-8112-efebf08eef57" (UID: "372a8d96-f5ef-4ba9-8112-efebf08eef57"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.746698 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/372a8d96-f5ef-4ba9-8112-efebf08eef57-kube-api-access-6xxff" (OuterVolumeSpecName: "kube-api-access-6xxff") pod "372a8d96-f5ef-4ba9-8112-efebf08eef57" (UID: "372a8d96-f5ef-4ba9-8112-efebf08eef57"). InnerVolumeSpecName "kube-api-access-6xxff". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.756760 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "372a8d96-f5ef-4ba9-8112-efebf08eef57" (UID: "372a8d96-f5ef-4ba9-8112-efebf08eef57"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.840841 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xxff\" (UniqueName: \"kubernetes.io/projected/372a8d96-f5ef-4ba9-8112-efebf08eef57-kube-api-access-6xxff\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.840909 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:54 crc kubenswrapper[4919]: I0930 20:59:54.840936 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/372a8d96-f5ef-4ba9-8112-efebf08eef57-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 20:59:55 crc kubenswrapper[4919]: I0930 20:59:55.549555 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-74qdp" Sep 30 20:59:55 crc kubenswrapper[4919]: I0930 20:59:55.588316 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-74qdp"] Sep 30 20:59:55 crc kubenswrapper[4919]: I0930 20:59:55.596385 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-74qdp"] Sep 30 20:59:55 crc kubenswrapper[4919]: I0930 20:59:55.647088 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" path="/var/lib/kubelet/pods/372a8d96-f5ef-4ba9-8112-efebf08eef57/volumes" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.162953 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4"] Sep 30 21:00:00 crc kubenswrapper[4919]: E0930 21:00:00.163926 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerName="extract-content" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.163942 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerName="extract-content" Sep 30 21:00:00 crc kubenswrapper[4919]: E0930 21:00:00.163965 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerName="extract-utilities" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.163974 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerName="extract-utilities" Sep 30 21:00:00 crc kubenswrapper[4919]: E0930 21:00:00.163990 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerName="registry-server" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.163999 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerName="registry-server" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.164313 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="372a8d96-f5ef-4ba9-8112-efebf08eef57" containerName="registry-server" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.165249 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.166956 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.167234 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.180192 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4"] Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.262141 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a22d82e3-8741-47a8-a607-2c9338b0fe3a-secret-volume\") pod \"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.262242 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zksrp\" (UniqueName: \"kubernetes.io/projected/a22d82e3-8741-47a8-a607-2c9338b0fe3a-kube-api-access-zksrp\") pod \"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.262407 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a22d82e3-8741-47a8-a607-2c9338b0fe3a-config-volume\") pod \"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.363970 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a22d82e3-8741-47a8-a607-2c9338b0fe3a-config-volume\") pod \"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.364065 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a22d82e3-8741-47a8-a607-2c9338b0fe3a-secret-volume\") pod \"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.364173 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zksrp\" (UniqueName: \"kubernetes.io/projected/a22d82e3-8741-47a8-a607-2c9338b0fe3a-kube-api-access-zksrp\") pod \"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.365249 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a22d82e3-8741-47a8-a607-2c9338b0fe3a-config-volume\") pod 
\"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.376118 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a22d82e3-8741-47a8-a607-2c9338b0fe3a-secret-volume\") pod \"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.389876 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zksrp\" (UniqueName: \"kubernetes.io/projected/a22d82e3-8741-47a8-a607-2c9338b0fe3a-kube-api-access-zksrp\") pod \"collect-profiles-29321100-lrgc4\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.493252 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:00 crc kubenswrapper[4919]: I0930 21:00:00.940956 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4"] Sep 30 21:00:01 crc kubenswrapper[4919]: I0930 21:00:01.606416 4919 generic.go:334] "Generic (PLEG): container finished" podID="a22d82e3-8741-47a8-a607-2c9338b0fe3a" containerID="73e5742efcc0f37f66fa49824ebee5e3fb79fca7377813f38a79b02d5e8efb93" exitCode=0 Sep 30 21:00:01 crc kubenswrapper[4919]: I0930 21:00:01.606625 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" event={"ID":"a22d82e3-8741-47a8-a607-2c9338b0fe3a","Type":"ContainerDied","Data":"73e5742efcc0f37f66fa49824ebee5e3fb79fca7377813f38a79b02d5e8efb93"} Sep 30 21:00:01 crc kubenswrapper[4919]: I0930 21:00:01.606980 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" event={"ID":"a22d82e3-8741-47a8-a607-2c9338b0fe3a","Type":"ContainerStarted","Data":"bf449616911673e262a6518db58367627891c5828c51da6cfcb9d218aa4076b6"} Sep 30 21:00:02 crc kubenswrapper[4919]: I0930 21:00:02.985852 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.153513 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zksrp\" (UniqueName: \"kubernetes.io/projected/a22d82e3-8741-47a8-a607-2c9338b0fe3a-kube-api-access-zksrp\") pod \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.153811 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a22d82e3-8741-47a8-a607-2c9338b0fe3a-secret-volume\") pod \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.154085 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a22d82e3-8741-47a8-a607-2c9338b0fe3a-config-volume\") pod \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\" (UID: \"a22d82e3-8741-47a8-a607-2c9338b0fe3a\") " Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.155437 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a22d82e3-8741-47a8-a607-2c9338b0fe3a-config-volume" (OuterVolumeSpecName: "config-volume") pod "a22d82e3-8741-47a8-a607-2c9338b0fe3a" (UID: "a22d82e3-8741-47a8-a607-2c9338b0fe3a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.159930 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a22d82e3-8741-47a8-a607-2c9338b0fe3a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a22d82e3-8741-47a8-a607-2c9338b0fe3a" (UID: "a22d82e3-8741-47a8-a607-2c9338b0fe3a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.160506 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a22d82e3-8741-47a8-a607-2c9338b0fe3a-kube-api-access-zksrp" (OuterVolumeSpecName: "kube-api-access-zksrp") pod "a22d82e3-8741-47a8-a607-2c9338b0fe3a" (UID: "a22d82e3-8741-47a8-a607-2c9338b0fe3a"). InnerVolumeSpecName "kube-api-access-zksrp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.257413 4919 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a22d82e3-8741-47a8-a607-2c9338b0fe3a-config-volume\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.257457 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zksrp\" (UniqueName: \"kubernetes.io/projected/a22d82e3-8741-47a8-a607-2c9338b0fe3a-kube-api-access-zksrp\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.257472 4919 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a22d82e3-8741-47a8-a607-2c9338b0fe3a-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.628238 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" event={"ID":"a22d82e3-8741-47a8-a607-2c9338b0fe3a","Type":"ContainerDied","Data":"bf449616911673e262a6518db58367627891c5828c51da6cfcb9d218aa4076b6"} Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.628747 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf449616911673e262a6518db58367627891c5828c51da6cfcb9d218aa4076b6" Sep 30 21:00:03 crc kubenswrapper[4919]: I0930 21:00:03.628410 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321100-lrgc4" Sep 30 21:00:04 crc kubenswrapper[4919]: I0930 21:00:04.062936 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"] Sep 30 21:00:04 crc kubenswrapper[4919]: I0930 21:00:04.078429 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321055-2qkks"] Sep 30 21:00:05 crc kubenswrapper[4919]: I0930 21:00:05.647073 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65daa38d-8652-4438-af0e-5afc3524e5d4" path="/var/lib/kubelet/pods/65daa38d-8652-4438-af0e-5afc3524e5d4/volumes" Sep 30 21:00:07 crc kubenswrapper[4919]: I0930 21:00:07.671868 4919 generic.go:334] "Generic (PLEG): container finished" podID="e4e968a1-eb51-4c2e-9672-ff0a6f050948" containerID="422483ac30a9fa2a191bef18fe7eca7dc8f77929e7af8fa2d7409bb19f1af7ef" exitCode=0 Sep 30 21:00:07 crc kubenswrapper[4919]: I0930 21:00:07.671973 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" event={"ID":"e4e968a1-eb51-4c2e-9672-ff0a6f050948","Type":"ContainerDied","Data":"422483ac30a9fa2a191bef18fe7eca7dc8f77929e7af8fa2d7409bb19f1af7ef"} Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.214857 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.289682 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-telemetry-combined-ca-bundle\") pod \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.289742 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-1\") pod \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.289812 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-inventory\") pod \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.289940 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ssh-key\") pod \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.289976 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62fvf\" (UniqueName: \"kubernetes.io/projected/e4e968a1-eb51-4c2e-9672-ff0a6f050948-kube-api-access-62fvf\") pod \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.290012 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-2\") pod \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.290055 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-0\") pod \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\" (UID: \"e4e968a1-eb51-4c2e-9672-ff0a6f050948\") " Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.295634 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4e968a1-eb51-4c2e-9672-ff0a6f050948-kube-api-access-62fvf" (OuterVolumeSpecName: "kube-api-access-62fvf") pod "e4e968a1-eb51-4c2e-9672-ff0a6f050948" (UID: "e4e968a1-eb51-4c2e-9672-ff0a6f050948"). InnerVolumeSpecName "kube-api-access-62fvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.298085 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "e4e968a1-eb51-4c2e-9672-ff0a6f050948" (UID: "e4e968a1-eb51-4c2e-9672-ff0a6f050948"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.317378 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "e4e968a1-eb51-4c2e-9672-ff0a6f050948" (UID: "e4e968a1-eb51-4c2e-9672-ff0a6f050948"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.326448 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e4e968a1-eb51-4c2e-9672-ff0a6f050948" (UID: "e4e968a1-eb51-4c2e-9672-ff0a6f050948"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.329833 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "e4e968a1-eb51-4c2e-9672-ff0a6f050948" (UID: "e4e968a1-eb51-4c2e-9672-ff0a6f050948"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.351813 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "e4e968a1-eb51-4c2e-9672-ff0a6f050948" (UID: "e4e968a1-eb51-4c2e-9672-ff0a6f050948"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.367616 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-inventory" (OuterVolumeSpecName: "inventory") pod "e4e968a1-eb51-4c2e-9672-ff0a6f050948" (UID: "e4e968a1-eb51-4c2e-9672-ff0a6f050948"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.400692 4919 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.400748 4919 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.400762 4919 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.400777 4919 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.400795 4919 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-inventory\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.400809 4919 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e4e968a1-eb51-4c2e-9672-ff0a6f050948-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.400822 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62fvf\" (UniqueName: \"kubernetes.io/projected/e4e968a1-eb51-4c2e-9672-ff0a6f050948-kube-api-access-62fvf\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.694966 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" event={"ID":"e4e968a1-eb51-4c2e-9672-ff0a6f050948","Type":"ContainerDied","Data":"182abf408e15907f2c69b64e3272c925d8e8e346a2371e049b2d8c9d2991c70a"} Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.695017 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="182abf408e15907f2c69b64e3272c925d8e8e346a2371e049b2d8c9d2991c70a" Sep 30 21:00:09 crc kubenswrapper[4919]: I0930 21:00:09.695591 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-fnww4" Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.514808 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.515492 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="af763f18-f1aa-442d-aaac-a6b4353ce21b" containerName="kube-state-metrics" containerID="cri-o://65c27ca22994224f5341e4465b1ee63843a9e525bf4b7f7e811192722b1739a9" gracePeriod=30 Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.564241 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.564768 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="ceilometer-central-agent" containerID="cri-o://ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc" gracePeriod=30 Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.564810 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="proxy-httpd" containerID="cri-o://f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec" gracePeriod=30 Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.564858 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="sg-core" containerID="cri-o://5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e" gracePeriod=30 Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.564869 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="ceilometer-notification-agent" containerID="cri-o://3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c" gracePeriod=30 Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.795608 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerID="f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec" exitCode=0 Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.799490 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerID="5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e" exitCode=2 Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.799647 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerDied","Data":"f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec"} Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.799685 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerDied","Data":"5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e"} Sep 30 21:00:17 crc kubenswrapper[4919]: I0930 21:00:17.801678 4919 generic.go:334] "Generic (PLEG): container finished" podID="af763f18-f1aa-442d-aaac-a6b4353ce21b" containerID="65c27ca22994224f5341e4465b1ee63843a9e525bf4b7f7e811192722b1739a9" exitCode=2 Sep 30 21:00:17 crc kubenswrapper[4919]: 
I0930 21:00:17.801705 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"af763f18-f1aa-442d-aaac-a6b4353ce21b","Type":"ContainerDied","Data":"65c27ca22994224f5341e4465b1ee63843a9e525bf4b7f7e811192722b1739a9"} Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.035073 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.183283 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-config\") pod \"af763f18-f1aa-442d-aaac-a6b4353ce21b\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.184585 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-certs\") pod \"af763f18-f1aa-442d-aaac-a6b4353ce21b\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.184853 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdv7b\" (UniqueName: \"kubernetes.io/projected/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-api-access-tdv7b\") pod \"af763f18-f1aa-442d-aaac-a6b4353ce21b\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.185075 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-combined-ca-bundle\") pod \"af763f18-f1aa-442d-aaac-a6b4353ce21b\" (UID: \"af763f18-f1aa-442d-aaac-a6b4353ce21b\") " Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.192187 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-api-access-tdv7b" (OuterVolumeSpecName: "kube-api-access-tdv7b") pod "af763f18-f1aa-442d-aaac-a6b4353ce21b" (UID: "af763f18-f1aa-442d-aaac-a6b4353ce21b"). InnerVolumeSpecName "kube-api-access-tdv7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.211786 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "af763f18-f1aa-442d-aaac-a6b4353ce21b" (UID: "af763f18-f1aa-442d-aaac-a6b4353ce21b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.214897 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "af763f18-f1aa-442d-aaac-a6b4353ce21b" (UID: "af763f18-f1aa-442d-aaac-a6b4353ce21b"). InnerVolumeSpecName "kube-state-metrics-tls-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.233767 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "af763f18-f1aa-442d-aaac-a6b4353ce21b" (UID: "af763f18-f1aa-442d-aaac-a6b4353ce21b"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.287309 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdv7b\" (UniqueName: \"kubernetes.io/projected/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-api-access-tdv7b\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.287551 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.287612 4919 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.287668 4919 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/af763f18-f1aa-442d-aaac-a6b4353ce21b-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.576134 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.209:3000/\": dial tcp 10.217.0.209:3000: connect: connection refused" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.816140 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"af763f18-f1aa-442d-aaac-a6b4353ce21b","Type":"ContainerDied","Data":"1849cc5002c5d74d89f45b4ef2347343e2e06f08264ccd3ddebd2f4c657f2c62"} Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.816150 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.816630 4919 scope.go:117] "RemoveContainer" containerID="65c27ca22994224f5341e4465b1ee63843a9e525bf4b7f7e811192722b1739a9" Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.822129 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerID="ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc" exitCode=0 Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.822168 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerDied","Data":"ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc"} Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.853388 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 21:00:18 crc kubenswrapper[4919]: I0930 21:00:18.861255 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 30 21:00:19 crc kubenswrapper[4919]: I0930 21:00:19.652650 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af763f18-f1aa-442d-aaac-a6b4353ce21b" path="/var/lib/kubelet/pods/af763f18-f1aa-442d-aaac-a6b4353ce21b/volumes" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.329947 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.457232 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-run-httpd\") pod \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.457325 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-sg-core-conf-yaml\") pod \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.457372 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-ceilometer-tls-certs\") pod \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.457420 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-scripts\") pod \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.457495 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-config-data\") pod \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.457519 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f87zl\" (UniqueName: \"kubernetes.io/projected/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-kube-api-access-f87zl\") pod 
\"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.457617 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-combined-ca-bundle\") pod \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.457704 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-log-httpd\") pod \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\" (UID: \"eb151a3b-de15-43b0-be4e-83b9bb2a35a6\") " Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.458042 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "eb151a3b-de15-43b0-be4e-83b9bb2a35a6" (UID: "eb151a3b-de15-43b0-be4e-83b9bb2a35a6"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.458447 4919 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.458487 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "eb151a3b-de15-43b0-be4e-83b9bb2a35a6" (UID: "eb151a3b-de15-43b0-be4e-83b9bb2a35a6"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.463797 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-scripts" (OuterVolumeSpecName: "scripts") pod "eb151a3b-de15-43b0-be4e-83b9bb2a35a6" (UID: "eb151a3b-de15-43b0-be4e-83b9bb2a35a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.477630 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-kube-api-access-f87zl" (OuterVolumeSpecName: "kube-api-access-f87zl") pod "eb151a3b-de15-43b0-be4e-83b9bb2a35a6" (UID: "eb151a3b-de15-43b0-be4e-83b9bb2a35a6"). InnerVolumeSpecName "kube-api-access-f87zl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.511502 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "eb151a3b-de15-43b0-be4e-83b9bb2a35a6" (UID: "eb151a3b-de15-43b0-be4e-83b9bb2a35a6"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.557826 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb151a3b-de15-43b0-be4e-83b9bb2a35a6" (UID: "eb151a3b-de15-43b0-be4e-83b9bb2a35a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.560134 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.560168 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f87zl\" (UniqueName: \"kubernetes.io/projected/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-kube-api-access-f87zl\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.560183 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.560194 4919 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.560205 4919 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.567516 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "eb151a3b-de15-43b0-be4e-83b9bb2a35a6" (UID: "eb151a3b-de15-43b0-be4e-83b9bb2a35a6"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.604135 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-config-data" (OuterVolumeSpecName: "config-data") pod "eb151a3b-de15-43b0-be4e-83b9bb2a35a6" (UID: "eb151a3b-de15-43b0-be4e-83b9bb2a35a6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.662287 4919 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.662324 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb151a3b-de15-43b0-be4e-83b9bb2a35a6-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.808200 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-vcpp6"] Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.808455 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-logging/collector-vcpp6" podUID="2edf0652-09cd-4eb0-915b-2fa6e0554a36" containerName="collector" containerID="cri-o://875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c" gracePeriod=10 Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.864587 4919 generic.go:334] "Generic (PLEG): container finished" podID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerID="3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c" exitCode=0 Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.864625 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerDied","Data":"3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c"} Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.864651 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"eb151a3b-de15-43b0-be4e-83b9bb2a35a6","Type":"ContainerDied","Data":"b7986192215c33d6afcf280d01cd84a24e28cda858277fef2e0a198dca4b7787"} Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.864666 4919 scope.go:117] "RemoveContainer" containerID="f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.864879 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.898857 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.902868 4919 scope.go:117] "RemoveContainer" containerID="5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.906844 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.927659 4919 scope.go:117] "RemoveContainer" containerID="3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.961112 4919 scope.go:117] "RemoveContainer" containerID="ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.983010 4919 scope.go:117] "RemoveContainer" containerID="f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec" Sep 30 21:00:22 crc kubenswrapper[4919]: E0930 21:00:22.983385 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec\": container with ID starting with f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec not found: ID does not exist" containerID="f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.983412 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec"} err="failed to get container status \"f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec\": rpc error: code = NotFound desc = could not find container \"f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec\": container with ID starting with f12bce62f0a318a02c1b790147ba53e81b67bd2dac9fe34532132a877d5adcec not found: ID does not exist" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.983431 4919 scope.go:117] "RemoveContainer" containerID="5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e" Sep 30 21:00:22 crc kubenswrapper[4919]: E0930 21:00:22.983801 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e\": container with ID starting with 5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e not found: ID does not exist" containerID="5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.983814 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e"} err="failed to get container status \"5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e\": rpc error: code = NotFound desc = could not find container \"5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e\": container with ID starting with 5f5c01f1b1a95b24d6d60425f06b74567a667c56cf05aab9305a0f3117b0c01e not found: ID does not exist" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.983825 4919 scope.go:117] "RemoveContainer" containerID="3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c" Sep 30 
21:00:22 crc kubenswrapper[4919]: E0930 21:00:22.984056 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c\": container with ID starting with 3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c not found: ID does not exist" containerID="3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.984071 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c"} err="failed to get container status \"3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c\": rpc error: code = NotFound desc = could not find container \"3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c\": container with ID starting with 3ca0ab6e00f77e5a583cd42eda814bb4d453a15b260915efeb1b349c3611aa1c not found: ID does not exist" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.984084 4919 scope.go:117] "RemoveContainer" containerID="ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc" Sep 30 21:00:22 crc kubenswrapper[4919]: E0930 21:00:22.984482 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc\": container with ID starting with ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc not found: ID does not exist" containerID="ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc" Sep 30 21:00:22 crc kubenswrapper[4919]: I0930 21:00:22.984501 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc"} err="failed to get container status \"ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc\": rpc error: code = NotFound desc = could not find container \"ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc\": container with ID starting with ad8e007bde729406c2c1abfd1c9e6f342a38e7170869578ae25cd5b0189169cc not found: ID does not exist" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.447104 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-logging/collector-vcpp6" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.578889 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-sa-token\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.578963 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-trusted-ca\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.578981 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-token\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579013 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config-openshift-service-cacrt\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579029 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-metrics\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579051 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579151 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/2edf0652-09cd-4eb0-915b-2fa6e0554a36-tmp\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579169 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-entrypoint\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579205 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bw75l\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-kube-api-access-bw75l\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579278 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collector-syslog-receiver\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-syslog-receiver\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: 
\"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579376 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/2edf0652-09cd-4eb0-915b-2fa6e0554a36-datadir\") pod \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\" (UID: \"2edf0652-09cd-4eb0-915b-2fa6e0554a36\") " Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.579747 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2edf0652-09cd-4eb0-915b-2fa6e0554a36-datadir" (OuterVolumeSpecName: "datadir") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "datadir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.580041 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.582603 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config" (OuterVolumeSpecName: "config") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.582762 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config-openshift-service-cacrt" (OuterVolumeSpecName: "config-openshift-service-cacrt") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "config-openshift-service-cacrt". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.585928 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-token" (OuterVolumeSpecName: "collector-token") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "collector-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.590185 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-metrics" (OuterVolumeSpecName: "metrics") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.591611 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-kube-api-access-bw75l" (OuterVolumeSpecName: "kube-api-access-bw75l") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "kube-api-access-bw75l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.591813 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-sa-token" (OuterVolumeSpecName: "sa-token") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.595303 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2edf0652-09cd-4eb0-915b-2fa6e0554a36-tmp" (OuterVolumeSpecName: "tmp") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "tmp". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.598389 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-syslog-receiver" (OuterVolumeSpecName: "collector-syslog-receiver") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "collector-syslog-receiver". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.629766 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-entrypoint" (OuterVolumeSpecName: "entrypoint") pod "2edf0652-09cd-4eb0-915b-2fa6e0554a36" (UID: "2edf0652-09cd-4eb0-915b-2fa6e0554a36"). InnerVolumeSpecName "entrypoint". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.651901 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" path="/var/lib/kubelet/pods/eb151a3b-de15-43b0-be4e-83b9bb2a35a6/volumes" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.656111 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-tgthv"] Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.656667 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" podUID="83bfe237-5002-4f48-a10a-f6966ed9120c" containerName="cluster-logging-operator" containerID="cri-o://09ea728e94087c7fd5b782d3a0aa4074cfbcd1c2680834703a49efb9fdc3cad3" gracePeriod=30 Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681344 4919 reconciler_common.go:293] "Volume detached for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/2edf0652-09cd-4eb0-915b-2fa6e0554a36-tmp\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681379 4919 reconciler_common.go:293] "Volume detached for volume \"entrypoint\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-entrypoint\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681391 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bw75l\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-kube-api-access-bw75l\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681404 4919 reconciler_common.go:293] "Volume detached for volume \"collector-syslog-receiver\" 
(UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-syslog-receiver\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681417 4919 reconciler_common.go:293] "Volume detached for volume \"datadir\" (UniqueName: \"kubernetes.io/host-path/2edf0652-09cd-4eb0-915b-2fa6e0554a36-datadir\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681429 4919 reconciler_common.go:293] "Volume detached for volume \"sa-token\" (UniqueName: \"kubernetes.io/projected/2edf0652-09cd-4eb0-915b-2fa6e0554a36-sa-token\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681442 4919 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681454 4919 reconciler_common.go:293] "Volume detached for volume \"collector-token\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-collector-token\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681511 4919 reconciler_common.go:293] "Volume detached for volume \"config-openshift-service-cacrt\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config-openshift-service-cacrt\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681525 4919 reconciler_common.go:293] "Volume detached for volume \"metrics\" (UniqueName: \"kubernetes.io/secret/2edf0652-09cd-4eb0-915b-2fa6e0554a36-metrics\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.681538 4919 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2edf0652-09cd-4eb0-915b-2fa6e0554a36-config\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.884473 4919 generic.go:334] "Generic (PLEG): container finished" podID="83bfe237-5002-4f48-a10a-f6966ed9120c" containerID="09ea728e94087c7fd5b782d3a0aa4074cfbcd1c2680834703a49efb9fdc3cad3" exitCode=0 Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.884535 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" event={"ID":"83bfe237-5002-4f48-a10a-f6966ed9120c","Type":"ContainerDied","Data":"09ea728e94087c7fd5b782d3a0aa4074cfbcd1c2680834703a49efb9fdc3cad3"} Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.885985 4919 generic.go:334] "Generic (PLEG): container finished" podID="2edf0652-09cd-4eb0-915b-2fa6e0554a36" containerID="875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c" exitCode=0 Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.886018 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-vcpp6" event={"ID":"2edf0652-09cd-4eb0-915b-2fa6e0554a36","Type":"ContainerDied","Data":"875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c"} Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.886034 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/collector-vcpp6" event={"ID":"2edf0652-09cd-4eb0-915b-2fa6e0554a36","Type":"ContainerDied","Data":"e4c36dcd3f014345d52aa6557aa87f5429cf926192a04277cb93e0531e501c4e"} Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.886050 4919 scope.go:117] "RemoveContainer" 
containerID="875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.886154 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-logging/collector-vcpp6" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.925996 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/collector-vcpp6"] Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.939708 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-logging/collector-vcpp6"] Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.943787 4919 scope.go:117] "RemoveContainer" containerID="875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c" Sep 30 21:00:23 crc kubenswrapper[4919]: E0930 21:00:23.944451 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c\": container with ID starting with 875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c not found: ID does not exist" containerID="875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c" Sep 30 21:00:23 crc kubenswrapper[4919]: I0930 21:00:23.944536 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c"} err="failed to get container status \"875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c\": rpc error: code = NotFound desc = could not find container \"875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c\": container with ID starting with 875bc1c4f68fbd16393282955ab57ed1351124ebf54eb3659c1a7a6f311cb32c not found: ID does not exist" Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.222946 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.397327 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bks9h\" (UniqueName: \"kubernetes.io/projected/83bfe237-5002-4f48-a10a-f6966ed9120c-kube-api-access-bks9h\") pod \"83bfe237-5002-4f48-a10a-f6966ed9120c\" (UID: \"83bfe237-5002-4f48-a10a-f6966ed9120c\") " Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.404560 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83bfe237-5002-4f48-a10a-f6966ed9120c-kube-api-access-bks9h" (OuterVolumeSpecName: "kube-api-access-bks9h") pod "83bfe237-5002-4f48-a10a-f6966ed9120c" (UID: "83bfe237-5002-4f48-a10a-f6966ed9120c"). InnerVolumeSpecName "kube-api-access-bks9h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.499974 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bks9h\" (UniqueName: \"kubernetes.io/projected/83bfe237-5002-4f48-a10a-f6966ed9120c-kube-api-access-bks9h\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.897471 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" event={"ID":"83bfe237-5002-4f48-a10a-f6966ed9120c","Type":"ContainerDied","Data":"73b44d627b85c8553b033f6cba2b75bbb1441ff232179878513154bb1c98f061"} Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.897509 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-logging/cluster-logging-operator-fcc886d58-tgthv" Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.897877 4919 scope.go:117] "RemoveContainer" containerID="09ea728e94087c7fd5b782d3a0aa4074cfbcd1c2680834703a49efb9fdc3cad3" Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.936963 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-tgthv"] Sep 30 21:00:24 crc kubenswrapper[4919]: I0930 21:00:24.947620 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-logging/cluster-logging-operator-fcc886d58-tgthv"] Sep 30 21:00:25 crc kubenswrapper[4919]: I0930 21:00:25.655551 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2edf0652-09cd-4eb0-915b-2fa6e0554a36" path="/var/lib/kubelet/pods/2edf0652-09cd-4eb0-915b-2fa6e0554a36/volumes" Sep 30 21:00:25 crc kubenswrapper[4919]: I0930 21:00:25.656512 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83bfe237-5002-4f48-a10a-f6966ed9120c" path="/var/lib/kubelet/pods/83bfe237-5002-4f48-a10a-f6966ed9120c/volumes" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.785125 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg"] Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.785981 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="proxy-httpd" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.785995 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="proxy-httpd" Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.786008 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="sg-core" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786014 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="sg-core" Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.786031 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a22d82e3-8741-47a8-a607-2c9338b0fe3a" containerName="collect-profiles" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786037 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="a22d82e3-8741-47a8-a607-2c9338b0fe3a" containerName="collect-profiles" Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.786051 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af763f18-f1aa-442d-aaac-a6b4353ce21b" containerName="kube-state-metrics" Sep 30 21:00:31 crc kubenswrapper[4919]: 
I0930 21:00:31.786056 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="af763f18-f1aa-442d-aaac-a6b4353ce21b" containerName="kube-state-metrics" Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.786069 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="ceilometer-notification-agent" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786074 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="ceilometer-notification-agent" Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.786085 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="ceilometer-central-agent" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786090 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="ceilometer-central-agent" Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.786097 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2edf0652-09cd-4eb0-915b-2fa6e0554a36" containerName="collector" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786102 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="2edf0652-09cd-4eb0-915b-2fa6e0554a36" containerName="collector" Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.786116 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4e968a1-eb51-4c2e-9672-ff0a6f050948" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786123 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4e968a1-eb51-4c2e-9672-ff0a6f050948" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 21:00:31 crc kubenswrapper[4919]: E0930 21:00:31.786134 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83bfe237-5002-4f48-a10a-f6966ed9120c" containerName="cluster-logging-operator" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786141 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="83bfe237-5002-4f48-a10a-f6966ed9120c" containerName="cluster-logging-operator" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786330 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="ceilometer-central-agent" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786345 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="83bfe237-5002-4f48-a10a-f6966ed9120c" containerName="cluster-logging-operator" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786354 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="proxy-httpd" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786366 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4e968a1-eb51-4c2e-9672-ff0a6f050948" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786377 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="ceilometer-notification-agent" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786389 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="af763f18-f1aa-442d-aaac-a6b4353ce21b" containerName="kube-state-metrics" Sep 30 21:00:31 crc 
kubenswrapper[4919]: I0930 21:00:31.786398 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="a22d82e3-8741-47a8-a607-2c9338b0fe3a" containerName="collect-profiles" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786413 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb151a3b-de15-43b0-be4e-83b9bb2a35a6" containerName="sg-core" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.786423 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="2edf0652-09cd-4eb0-915b-2fa6e0554a36" containerName="collector" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.787649 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.791837 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.803235 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg"] Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.852208 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.852328 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.852367 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxkv8\" (UniqueName: \"kubernetes.io/projected/1c228af8-0449-4b6c-95e3-ef80d378fbdd-kube-api-access-jxkv8\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.954618 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxkv8\" (UniqueName: \"kubernetes.io/projected/1c228af8-0449-4b6c-95e3-ef80d378fbdd-kube-api-access-jxkv8\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.954775 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " 
pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.954833 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.955272 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.955310 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:31 crc kubenswrapper[4919]: I0930 21:00:31.977760 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxkv8\" (UniqueName: \"kubernetes.io/projected/1c228af8-0449-4b6c-95e3-ef80d378fbdd-kube-api-access-jxkv8\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" Sep 30 21:00:32 crc kubenswrapper[4919]: I0930 21:00:32.130293 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg"
Sep 30 21:00:32 crc kubenswrapper[4919]: I0930 21:00:32.603933 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg"]
Sep 30 21:00:32 crc kubenswrapper[4919]: I0930 21:00:32.974153 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" event={"ID":"1c228af8-0449-4b6c-95e3-ef80d378fbdd","Type":"ContainerStarted","Data":"ca1853a31dbe0970ab5373f4a09c3db83eac29bc52d40313b90dc6386777766c"}
Sep 30 21:00:32 crc kubenswrapper[4919]: I0930 21:00:32.974195 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" event={"ID":"1c228af8-0449-4b6c-95e3-ef80d378fbdd","Type":"ContainerStarted","Data":"e1d81f7908f2ef046373875b163042fd80a7e35694c79655b83a11086d77d0db"}
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.002093 4919 generic.go:334] "Generic (PLEG): container finished" podID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerID="ca1853a31dbe0970ab5373f4a09c3db83eac29bc52d40313b90dc6386777766c" exitCode=0
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.002128 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" event={"ID":"1c228af8-0449-4b6c-95e3-ef80d378fbdd","Type":"ContainerDied","Data":"ca1853a31dbe0970ab5373f4a09c3db83eac29bc52d40313b90dc6386777766c"}
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.692307 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"]
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.692832 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" containerName="openstackclient" containerID="cri-o://8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364" gracePeriod=2
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.703844 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"]
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.742146 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Sep 30 21:00:34 crc kubenswrapper[4919]: E0930 21:00:34.742653 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" containerName="openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.742675 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" containerName="openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.742896 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" containerName="openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.743705 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.764480 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.765926 4919 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" podUID="e3a2eec4-8bce-4afb-8eb9-e57417515312"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.819410 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a2eec4-8bce-4afb-8eb9-e57417515312-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.819504 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lglsd\" (UniqueName: \"kubernetes.io/projected/e3a2eec4-8bce-4afb-8eb9-e57417515312-kube-api-access-lglsd\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.819536 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3a2eec4-8bce-4afb-8eb9-e57417515312-openstack-config\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.819600 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3a2eec4-8bce-4afb-8eb9-e57417515312-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.921399 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lglsd\" (UniqueName: \"kubernetes.io/projected/e3a2eec4-8bce-4afb-8eb9-e57417515312-kube-api-access-lglsd\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.921455 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3a2eec4-8bce-4afb-8eb9-e57417515312-openstack-config\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.921509 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3a2eec4-8bce-4afb-8eb9-e57417515312-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.921554 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a2eec4-8bce-4afb-8eb9-e57417515312-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.922537 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e3a2eec4-8bce-4afb-8eb9-e57417515312-openstack-config\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.927354 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e3a2eec4-8bce-4afb-8eb9-e57417515312-openstack-config-secret\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.927603 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3a2eec4-8bce-4afb-8eb9-e57417515312-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:34 crc kubenswrapper[4919]: I0930 21:00:34.937708 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lglsd\" (UniqueName: \"kubernetes.io/projected/e3a2eec4-8bce-4afb-8eb9-e57417515312-kube-api-access-lglsd\") pod \"openstackclient\" (UID: \"e3a2eec4-8bce-4afb-8eb9-e57417515312\") " pod="openstack/openstackclient"
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.063933 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.367324 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-create-pc5n6"]
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.370184 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-pc5n6"
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.385569 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-pc5n6"]
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.546323 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk8lb\" (UniqueName: \"kubernetes.io/projected/d40b718b-eccc-4eb3-b782-b7f1e68b47ca-kube-api-access-wk8lb\") pod \"cloudkitty-db-create-pc5n6\" (UID: \"d40b718b-eccc-4eb3-b782-b7f1e68b47ca\") " pod="openstack/cloudkitty-db-create-pc5n6"
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.648682 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk8lb\" (UniqueName: \"kubernetes.io/projected/d40b718b-eccc-4eb3-b782-b7f1e68b47ca-kube-api-access-wk8lb\") pod \"cloudkitty-db-create-pc5n6\" (UID: \"d40b718b-eccc-4eb3-b782-b7f1e68b47ca\") " pod="openstack/cloudkitty-db-create-pc5n6"
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.680016 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk8lb\" (UniqueName: \"kubernetes.io/projected/d40b718b-eccc-4eb3-b782-b7f1e68b47ca-kube-api-access-wk8lb\") pod \"cloudkitty-db-create-pc5n6\" (UID: \"d40b718b-eccc-4eb3-b782-b7f1e68b47ca\") " pod="openstack/cloudkitty-db-create-pc5n6"
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.707834 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-pc5n6"
Sep 30 21:00:35 crc kubenswrapper[4919]: I0930 21:00:35.751809 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Sep 30 21:00:36 crc kubenswrapper[4919]: I0930 21:00:36.026953 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e3a2eec4-8bce-4afb-8eb9-e57417515312","Type":"ContainerStarted","Data":"d7a3f7047cde5a323b793dd5f05e84af8b09c8ee279920e9db1252dce0663fb6"}
Sep 30 21:00:36 crc kubenswrapper[4919]: I0930 21:00:36.031062 4919 generic.go:334] "Generic (PLEG): container finished" podID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerID="fd413bcc61fed9e4e3db15fe7cc00e7e8e89cbf76a5c644b5b4ff172714a286a" exitCode=0
Sep 30 21:00:36 crc kubenswrapper[4919]: I0930 21:00:36.031122 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" event={"ID":"1c228af8-0449-4b6c-95e3-ef80d378fbdd","Type":"ContainerDied","Data":"fd413bcc61fed9e4e3db15fe7cc00e7e8e89cbf76a5c644b5b4ff172714a286a"}
Sep 30 21:00:36 crc kubenswrapper[4919]: I0930 21:00:36.208823 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-pc5n6"]
Sep 30 21:00:36 crc kubenswrapper[4919]: W0930 21:00:36.221303 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd40b718b_eccc_4eb3_b782_b7f1e68b47ca.slice/crio-c7289f1e42dc3b2d16adaa9f3dc57263e88de3de3f5e0ec5b28d32178b63aab3 WatchSource:0}: Error finding container c7289f1e42dc3b2d16adaa9f3dc57263e88de3de3f5e0ec5b28d32178b63aab3: Status 404 returned error can't find the container with id c7289f1e42dc3b2d16adaa9f3dc57263e88de3de3f5e0ec5b28d32178b63aab3
Sep 30 21:00:36 crc kubenswrapper[4919]: I0930 21:00:36.905604 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 30 21:00:36 crc kubenswrapper[4919]: I0930 21:00:36.911409 4919 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" podUID="e3a2eec4-8bce-4afb-8eb9-e57417515312"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.050087 4919 generic.go:334] "Generic (PLEG): container finished" podID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" containerID="8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364" exitCode=137
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.050111 4919 scope.go:117] "RemoveContainer" containerID="8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.050181 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.059561 4919 generic.go:334] "Generic (PLEG): container finished" podID="d40b718b-eccc-4eb3-b782-b7f1e68b47ca" containerID="39faf616a0c1500bdd9ca02b163c9227a0ee48b68042b730c073d85aceef7050" exitCode=0
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.059783 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-pc5n6" event={"ID":"d40b718b-eccc-4eb3-b782-b7f1e68b47ca","Type":"ContainerDied","Data":"39faf616a0c1500bdd9ca02b163c9227a0ee48b68042b730c073d85aceef7050"}
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.059831 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-pc5n6" event={"ID":"d40b718b-eccc-4eb3-b782-b7f1e68b47ca","Type":"ContainerStarted","Data":"c7289f1e42dc3b2d16adaa9f3dc57263e88de3de3f5e0ec5b28d32178b63aab3"}
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.062150 4919 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" podUID="e3a2eec4-8bce-4afb-8eb9-e57417515312"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.063362 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e3a2eec4-8bce-4afb-8eb9-e57417515312","Type":"ContainerStarted","Data":"0d2556e8509983e4007b646be63346f1e386775ff7663976b8e848236e42faaa"}
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.076042 4919 scope.go:117] "RemoveContainer" containerID="8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.076368 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config-secret\") pod \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") "
Sep 30 21:00:37 crc kubenswrapper[4919]: E0930 21:00:37.076439 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364\": container with ID starting with 8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364 not found: ID does not exist" containerID="8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.076558 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364"} err="failed to get container status \"8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364\": rpc error: code = NotFound desc = could not find container \"8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364\": container with ID starting with 8de83e3f365bbddd01d15ab2d3899b89bf7ec111571f7724f136bfe4ccac6364 not found: ID does not exist"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.076523 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-combined-ca-bundle\") pod \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") "
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.093528 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7t982\" (UniqueName: \"kubernetes.io/projected/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-kube-api-access-7t982\") pod \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") "
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.093839 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config\") pod \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\" (UID: \"1c681877-84e0-4fd4-ab4a-e13fe3d4da9e\") "
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.095243 4919 generic.go:334] "Generic (PLEG): container finished" podID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerID="d9f5ddf635c7debf10e5b6ab5904944aa8183d5512b2ae9c3ae5d1539d81eac0" exitCode=0
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.095284 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" event={"ID":"1c228af8-0449-4b6c-95e3-ef80d378fbdd","Type":"ContainerDied","Data":"d9f5ddf635c7debf10e5b6ab5904944aa8183d5512b2ae9c3ae5d1539d81eac0"}
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.111118 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-kube-api-access-7t982" (OuterVolumeSpecName: "kube-api-access-7t982") pod "1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" (UID: "1c681877-84e0-4fd4-ab4a-e13fe3d4da9e"). InnerVolumeSpecName "kube-api-access-7t982". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.121830 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.121812844 podStartE2EDuration="3.121812844s" podCreationTimestamp="2025-09-30 21:00:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:00:37.11163788 +0000 UTC m=+2822.227671017" watchObservedRunningTime="2025-09-30 21:00:37.121812844 +0000 UTC m=+2822.237845971"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.126858 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" (UID: "1c681877-84e0-4fd4-ab4a-e13fe3d4da9e"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.156003 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" (UID: "1c681877-84e0-4fd4-ab4a-e13fe3d4da9e"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.156596 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" (UID: "1c681877-84e0-4fd4-ab4a-e13fe3d4da9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.196628 4919 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.196671 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.196684 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7t982\" (UniqueName: \"kubernetes.io/projected/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-kube-api-access-7t982\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.196696 4919 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e-openstack-config\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.391261 4919 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" podUID="e3a2eec4-8bce-4afb-8eb9-e57417515312"
Sep 30 21:00:37 crc kubenswrapper[4919]: I0930 21:00:37.640739 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c681877-84e0-4fd4-ab4a-e13fe3d4da9e" path="/var/lib/kubelet/pods/1c681877-84e0-4fd4-ab4a-e13fe3d4da9e/volumes"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.468468 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"]
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.471751 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.476099 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca-bundle"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.476415 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-http"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.476549 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-grpc"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.476674 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-dockercfg-5lv8b"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.476855 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-config"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.480432 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"]
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.524911 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.524969 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182374fe-7fd0-4267-b938-396ef9eabd7f-config\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.524999 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.525045 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xq9v\" (UniqueName: \"kubernetes.io/projected/182374fe-7fd0-4267-b938-396ef9eabd7f-kube-api-access-7xq9v\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.525097 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.565968 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.630153 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.630386 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.630463 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182374fe-7fd0-4267-b938-396ef9eabd7f-config\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.630538 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.630568 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xq9v\" (UniqueName: \"kubernetes.io/projected/182374fe-7fd0-4267-b938-396ef9eabd7f-kube-api-access-7xq9v\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.632423 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/182374fe-7fd0-4267-b938-396ef9eabd7f-config\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.633301 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.641064 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.654622 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/182374fe-7fd0-4267-b938-396ef9eabd7f-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.678499 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"]
Sep 30 21:00:38 crc kubenswrapper[4919]: E0930 21:00:38.679364 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerName="util"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.679403 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerName="util"
Sep 30 21:00:38 crc kubenswrapper[4919]: E0930 21:00:38.679446 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerName="pull"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.679452 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerName="pull"
Sep 30 21:00:38 crc kubenswrapper[4919]: E0930 21:00:38.679485 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerName="extract"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.679491 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerName="extract"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.679753 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c228af8-0449-4b6c-95e3-ef80d378fbdd" containerName="extract"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.687938 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.688574 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xq9v\" (UniqueName: \"kubernetes.io/projected/182374fe-7fd0-4267-b938-396ef9eabd7f-kube-api-access-7xq9v\") pod \"cloudkitty-lokistack-distributor-bccccd5f6-zhbb7\" (UID: \"182374fe-7fd0-4267-b938-396ef9eabd7f\") " pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.734071 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-util\") pod \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") "
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.734253 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxkv8\" (UniqueName: \"kubernetes.io/projected/1c228af8-0449-4b6c-95e3-ef80d378fbdd-kube-api-access-jxkv8\") pod \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") "
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.734328 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-bundle\") pod \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\" (UID: \"1c228af8-0449-4b6c-95e3-ef80d378fbdd\") "
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.747336 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-bundle" (OuterVolumeSpecName: "bundle") pod "1c228af8-0449-4b6c-95e3-ef80d378fbdd" (UID: "1c228af8-0449-4b6c-95e3-ef80d378fbdd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.750323 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c228af8-0449-4b6c-95e3-ef80d378fbdd-kube-api-access-jxkv8" (OuterVolumeSpecName: "kube-api-access-jxkv8") pod "1c228af8-0449-4b6c-95e3-ef80d378fbdd" (UID: "1c228af8-0449-4b6c-95e3-ef80d378fbdd"). InnerVolumeSpecName "kube-api-access-jxkv8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.755740 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"]
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.767113 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-util" (OuterVolumeSpecName: "util") pod "1c228af8-0449-4b6c-95e3-ef80d378fbdd" (UID: "1c228af8-0449-4b6c-95e3-ef80d378fbdd"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.794649 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"]
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.796236 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.799869 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-grpc"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.800055 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"logging-loki-s3"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.801058 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-http"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.847431 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.847515 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.847676 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsrlv\" (UniqueName: \"kubernetes.io/projected/77efc011-2683-4ff8-80f9-be0b81c8c7f4-kube-api-access-qsrlv\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.847980 4919 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-bundle\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.847997 4919 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c228af8-0449-4b6c-95e3-ef80d378fbdd-util\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.848007 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxkv8\" (UniqueName: \"kubernetes.io/projected/1c228af8-0449-4b6c-95e3-ef80d378fbdd-kube-api-access-jxkv8\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.870459 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"]
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.877740 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-pc5n6"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.887121 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.905750 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"]
Sep 30 21:00:38 crc kubenswrapper[4919]: E0930 21:00:38.906172 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d40b718b-eccc-4eb3-b782-b7f1e68b47ca" containerName="mariadb-database-create"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.906198 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="d40b718b-eccc-4eb3-b782-b7f1e68b47ca" containerName="mariadb-database-create"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.906452 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="d40b718b-eccc-4eb3-b782-b7f1e68b47ca" containerName="mariadb-database-create"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.907327 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.920825 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-http"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.925687 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-grpc"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949404 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsrlv\" (UniqueName: \"kubernetes.io/projected/77efc011-2683-4ff8-80f9-be0b81c8c7f4-kube-api-access-qsrlv\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949507 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/300e57fc-d2df-468a-8c02-0bff21cd53c1-config\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949533 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkmp8\" (UniqueName: \"kubernetes.io/projected/300e57fc-d2df-468a-8c02-0bff21cd53c1-kube-api-access-vkmp8\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949565 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949666 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949691 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949727 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949773 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.949830 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-logging-loki-s3\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.950768 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-util\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.951019 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-bundle\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.952344 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"]
Sep 30 21:00:38 crc kubenswrapper[4919]: I0930 21:00:38.991738 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsrlv\" (UniqueName: \"kubernetes.io/projected/77efc011-2683-4ff8-80f9-be0b81c8c7f4-kube-api-access-qsrlv\") pod \"a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") " pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.055172 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"]
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.056336 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.061885 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk8lb\" (UniqueName: \"kubernetes.io/projected/d40b718b-eccc-4eb3-b782-b7f1e68b47ca-kube-api-access-wk8lb\") pod \"d40b718b-eccc-4eb3-b782-b7f1e68b47ca\" (UID: \"d40b718b-eccc-4eb3-b782-b7f1e68b47ca\") "
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062374 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h72jx\" (UniqueName: \"kubernetes.io/projected/8a614d9b-d891-48aa-9a64-d6b5187a8f73-kube-api-access-h72jx\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062452 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062513 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062546 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-logging-loki-s3\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062707 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/300e57fc-d2df-468a-8c02-0bff21cd53c1-config\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062727 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkmp8\" (UniqueName: \"kubernetes.io/projected/300e57fc-d2df-468a-8c02-0bff21cd53c1-kube-api-access-vkmp8\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062790 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062817 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.062971 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a614d9b-d891-48aa-9a64-d6b5187a8f73-config\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.063003 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.063055 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.067855 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.068260 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d40b718b-eccc-4eb3-b782-b7f1e68b47ca-kube-api-access-wk8lb" (OuterVolumeSpecName: "kube-api-access-wk8lb") pod "d40b718b-eccc-4eb3-b782-b7f1e68b47ca" (UID: "d40b718b-eccc-4eb3-b782-b7f1e68b47ca"). InnerVolumeSpecName "kube-api-access-wk8lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.073791 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/300e57fc-d2df-468a-8c02-0bff21cd53c1-config\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.074360 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-http"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.075736 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.081033 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.081872 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.082111 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.082273 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway-ca-bundle"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.082465 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.082657 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-client-http"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.083205 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-dockercfg-25ff5"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.085387 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/300e57fc-d2df-468a-8c02-0bff21cd53c1-logging-loki-s3\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.116616 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkmp8\" (UniqueName: \"kubernetes.io/projected/300e57fc-d2df-468a-8c02-0bff21cd53c1-kube-api-access-vkmp8\") pod \"cloudkitty-lokistack-querier-6b6cdc96db-9xqzp\" (UID: \"300e57fc-d2df-468a-8c02-0bff21cd53c1\") " pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.165782 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg" event={"ID":"1c228af8-0449-4b6c-95e3-ef80d378fbdd","Type":"ContainerDied","Data":"e1d81f7908f2ef046373875b163042fd80a7e35694c79655b83a11086d77d0db"}
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.166121 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1d81f7908f2ef046373875b163042fd80a7e35694c79655b83a11086d77d0db"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.166231 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.172364 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-pc5n6" event={"ID":"d40b718b-eccc-4eb3-b782-b7f1e68b47ca","Type":"ContainerDied","Data":"c7289f1e42dc3b2d16adaa9f3dc57263e88de3de3f5e0ec5b28d32178b63aab3"}
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.172400 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7289f1e42dc3b2d16adaa9f3dc57263e88de3de3f5e0ec5b28d32178b63aab3"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.172471 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-pc5n6"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.172829 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.172871 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-tls-secret\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.172906 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.172998 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a614d9b-d891-48aa-9a64-d6b5187a8f73-config\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.173046 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.173096 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzvd2\" (UniqueName: \"kubernetes.io/projected/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-kube-api-access-pzvd2\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.173961 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.174018 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h72jx\" (UniqueName: \"kubernetes.io/projected/8a614d9b-d891-48aa-9a64-d6b5187a8f73-kube-api-access-h72jx\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.174113 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.174146 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-tenants\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.174252 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.174522 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.174563 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.174619 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.174651 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-rbac\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.175307 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a614d9b-d891-48aa-9a64-d6b5187a8f73-config\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.177381 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.181854 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.190144 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.190418 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk8lb\" (UniqueName: \"kubernetes.io/projected/d40b718b-eccc-4eb3-b782-b7f1e68b47ca-kube-api-access-wk8lb\") on node \"crc\" DevicePath \"\""
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.192503 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/8a614d9b-d891-48aa-9a64-d6b5187a8f73-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.198074 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h72jx\" (UniqueName: \"kubernetes.io/projected/8a614d9b-d891-48aa-9a64-d6b5187a8f73-kube-api-access-h72jx\") pod \"cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd\" (UID: \"8a614d9b-d891-48aa-9a64-d6b5187a8f73\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.220282 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"]
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.221561 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.241919 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.242948 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"]
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.253888 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"]
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.298044 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.298128 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzvd2\" (UniqueName: \"kubernetes.io/projected/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-kube-api-access-pzvd2\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.298162 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-tls-secret\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.298798 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-rbac\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.298875 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-tenants\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.298943 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.298999 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299028 4919
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-tenants\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299075 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299106 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299143 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299200 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299227 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299315 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk45r\" (UniqueName: \"kubernetes.io/projected/0dde18df-d1bd-4b36-82af-cd0967cd942b-kube-api-access-mk45r\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299344 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-rbac\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 
crc kubenswrapper[4919]: I0930 21:00:39.299371 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299391 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.299409 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-tls-secret\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.304417 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.304554 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.304694 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.305287 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.305367 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-rbac\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.306550 4919 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-tls-secret\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.309824 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-tenants\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.313811 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.334059 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzvd2\" (UniqueName: \"kubernetes.io/projected/53a8fa4c-0ba7-4e41-86e5-e4e767126bc3-kube-api-access-pzvd2\") pod \"cloudkitty-lokistack-gateway-89dc74b89-6jf89\" (UID: \"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: E0930 21:00:39.398295 4919 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd40b718b_eccc_4eb3_b782_b7f1e68b47ca.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c228af8_0449_4b6c_95e3_ef80d378fbdd.slice\": RecentStats: unable to find data in memory cache]" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.404943 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.405003 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-tls-secret\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.405031 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-rbac\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.405107 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.406738 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.407393 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-tenants\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.407463 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.407485 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.407549 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.407649 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.407688 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk45r\" (UniqueName: \"kubernetes.io/projected/0dde18df-d1bd-4b36-82af-cd0967cd942b-kube-api-access-mk45r\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.413422 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-rbac\") 
pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.413756 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.413832 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.419107 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.422751 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.432579 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-tls-secret\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.432874 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/0dde18df-d1bd-4b36-82af-cd0967cd942b-tenants\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.470559 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk45r\" (UniqueName: \"kubernetes.io/projected/0dde18df-d1bd-4b36-82af-cd0967cd942b-kube-api-access-mk45r\") pod \"cloudkitty-lokistack-gateway-89dc74b89-bs96r\" (UID: \"0dde18df-d1bd-4b36-82af-cd0967cd942b\") " pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.591794 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.684929 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.686635 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.690181 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-http" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.690442 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-grpc" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.707276 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.731440 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7"] Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.809600 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.814875 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.814935 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-logging-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.815052 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-66373b7e-5278-4c0d-9283-7efc9c923e52\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-66373b7e-5278-4c0d-9283-7efc9c923e52\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.815088 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85f27421-a520-4043-b8d5-7729b07a0bed-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.815139 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-92a40970-0d7b-40c5-9be2-5b8815753c9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-92a40970-0d7b-40c5-9be2-5b8815753c9f\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.815164 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh785\" (UniqueName: \"kubernetes.io/projected/85f27421-a520-4043-b8d5-7729b07a0bed-kube-api-access-lh785\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 
30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.815496 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.815543 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.818281 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:00:39 crc kubenswrapper[4919]: W0930 21:00:39.819498 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77efc011_2683_4ff8_80f9_be0b81c8c7f4.slice/crio-82870f700eaa8e09be3e89b2458ce9a3426a6ad098f22b430a67d2778408d5c5 WatchSource:0}: Error finding container 82870f700eaa8e09be3e89b2458ce9a3426a6ad098f22b430a67d2778408d5c5: Status 404 returned error can't find the container with id 82870f700eaa8e09be3e89b2458ce9a3426a6ad098f22b430a67d2778408d5c5 Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.820548 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-http" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.820711 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-grpc" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.822957 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.843060 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"] Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917044 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917315 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-logging-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917347 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" 
(UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917367 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917395 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e7c46755-c7dc-4b7f-917a-aecdd6a7157d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7c46755-c7dc-4b7f-917a-aecdd6a7157d\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917413 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917451 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917467 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-logging-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917530 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-66373b7e-5278-4c0d-9283-7efc9c923e52\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-66373b7e-5278-4c0d-9283-7efc9c923e52\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917554 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85f27421-a520-4043-b8d5-7729b07a0bed-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917584 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-92a40970-0d7b-40c5-9be2-5b8815753c9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-92a40970-0d7b-40c5-9be2-5b8815753c9f\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc 
kubenswrapper[4919]: I0930 21:00:39.917599 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh785\" (UniqueName: \"kubernetes.io/projected/85f27421-a520-4043-b8d5-7729b07a0bed-kube-api-access-lh785\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917627 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917663 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c6a0e4a-b52a-4312-a36e-94c6f709200a-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.917684 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mx88\" (UniqueName: \"kubernetes.io/projected/0c6a0e4a-b52a-4312-a36e-94c6f709200a-kube-api-access-6mx88\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.918606 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.919427 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85f27421-a520-4043-b8d5-7729b07a0bed-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.921433 4919 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.921464 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-92a40970-0d7b-40c5-9be2-5b8815753c9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-92a40970-0d7b-40c5-9be2-5b8815753c9f\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/8b9e8cd44ca97a1d4d1be6dde5887e029fc703cec61eafb813645dacb9a6f3ab/globalmount\"" pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.921742 4919 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.921783 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-66373b7e-5278-4c0d-9283-7efc9c923e52\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-66373b7e-5278-4c0d-9283-7efc9c923e52\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/05899b855ea401a0a00d83e349a9efc9a3f1efb66a22c62771197ae9dde5f320/globalmount\"" pod="openstack/cloudkitty-lokistack-ingester-0"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.924398 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.924462 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-logging-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.927995 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/85f27421-a520-4043-b8d5-7729b07a0bed-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.934182 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh785\" (UniqueName: \"kubernetes.io/projected/85f27421-a520-4043-b8d5-7729b07a0bed-kube-api-access-lh785\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.946863 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"]
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.949199 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.951933 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-grpc"
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.951947 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-http"
Sep 30 21:00:39 crc kubenswrapper[4919]: W0930 21:00:39.956504 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a614d9b_d891_48aa_9a64_d6b5187a8f73.slice/crio-314dd77002f4c402e79f86f16e1ddb0bf7a3f1819aeae24a6c916dae040ed2e7 WatchSource:0}: Error finding container 314dd77002f4c402e79f86f16e1ddb0bf7a3f1819aeae24a6c916dae040ed2e7: Status 404 returned error can't find the container with id 314dd77002f4c402e79f86f16e1ddb0bf7a3f1819aeae24a6c916dae040ed2e7
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.959658 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"]
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.968951 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"]
Sep 30 21:00:39 crc kubenswrapper[4919]: I0930 21:00:39.983807 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"]
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.005313 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-92a40970-0d7b-40c5-9be2-5b8815753c9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-92a40970-0d7b-40c5-9be2-5b8815753c9f\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020001 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020082 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020122 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-logging-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020145 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020162 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqljs\" (UniqueName: \"kubernetes.io/projected/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-kube-api-access-sqljs\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020193 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e7c46755-c7dc-4b7f-917a-aecdd6a7157d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7c46755-c7dc-4b7f-917a-aecdd6a7157d\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020241 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020416 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-94d8777f-a24f-4de7-8ed2-12562e038126\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-94d8777f-a24f-4de7-8ed2-12562e038126\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020487 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020563 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020586 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-logging-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020613 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020658 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c6a0e4a-b52a-4312-a36e-94c6f709200a-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.020686 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mx88\" (UniqueName: \"kubernetes.io/projected/0c6a0e4a-b52a-4312-a36e-94c6f709200a-kube-api-access-6mx88\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.022149 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.023044 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c6a0e4a-b52a-4312-a36e-94c6f709200a-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.026179 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.026323 4919 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.026354 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e7c46755-c7dc-4b7f-917a-aecdd6a7157d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7c46755-c7dc-4b7f-917a-aecdd6a7157d\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b90db69ab55bead81f1a6247bfd39397834b425baa692fcecbc77a4529080f4d/globalmount\"" pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.026506 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.030192 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-66373b7e-5278-4c0d-9283-7efc9c923e52\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-66373b7e-5278-4c0d-9283-7efc9c923e52\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85f27421-a520-4043-b8d5-7729b07a0bed\") " pod="openstack/cloudkitty-lokistack-ingester-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.030792 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/0c6a0e4a-b52a-4312-a36e-94c6f709200a-logging-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.041512 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mx88\" (UniqueName: \"kubernetes.io/projected/0c6a0e4a-b52a-4312-a36e-94c6f709200a-kube-api-access-6mx88\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.071637 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e7c46755-c7dc-4b7f-917a-aecdd6a7157d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7c46755-c7dc-4b7f-917a-aecdd6a7157d\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"0c6a0e4a-b52a-4312-a36e-94c6f709200a\") " pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.122831 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqljs\" (UniqueName: \"kubernetes.io/projected/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-kube-api-access-sqljs\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.122926 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.122966 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-94d8777f-a24f-4de7-8ed2-12562e038126\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-94d8777f-a24f-4de7-8ed2-12562e038126\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.123009 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.123043 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-logging-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.123062 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.123113 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.125094 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.125154 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.129909 4919 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.130095 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-94d8777f-a24f-4de7-8ed2-12562e038126\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-94d8777f-a24f-4de7-8ed2-12562e038126\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e26cb4002b1d33a0c12e2c4a5457505238c0e2f908a1498459d14743d102c311/globalmount\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.130118 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.129919 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.130997 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logging-loki-s3\" (UniqueName: \"kubernetes.io/secret/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-logging-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.148114 4919 util.go:30] "No sandbox for pod can be found. 
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.159889 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqljs\" (UniqueName: \"kubernetes.io/projected/871d1c5f-b69f-44c9-88c0-f72c11d61eb5-kube-api-access-sqljs\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.171132 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89"]
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.207349 4919 generic.go:334] "Generic (PLEG): container finished" podID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerID="7a301d76f8c413a603a4e159d27c8050e783b7b5ddb43104d650e4318a4e6ea3" exitCode=0
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.207562 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb" event={"ID":"77efc011-2683-4ff8-80f9-be0b81c8c7f4","Type":"ContainerDied","Data":"7a301d76f8c413a603a4e159d27c8050e783b7b5ddb43104d650e4318a4e6ea3"}
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.207616 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb" event={"ID":"77efc011-2683-4ff8-80f9-be0b81c8c7f4","Type":"ContainerStarted","Data":"82870f700eaa8e09be3e89b2458ce9a3426a6ad098f22b430a67d2778408d5c5"}
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.213753 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd" event={"ID":"8a614d9b-d891-48aa-9a64-d6b5187a8f73","Type":"ContainerStarted","Data":"314dd77002f4c402e79f86f16e1ddb0bf7a3f1819aeae24a6c916dae040ed2e7"}
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.219679 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7" event={"ID":"182374fe-7fd0-4267-b938-396ef9eabd7f","Type":"ContainerStarted","Data":"9cfe4cf74ae67d19290b970a3d91939c7abd2154f90ca5c1e49f990a5d7aaaed"}
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.223036 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-94d8777f-a24f-4de7-8ed2-12562e038126\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-94d8777f-a24f-4de7-8ed2-12562e038126\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"871d1c5f-b69f-44c9-88c0-f72c11d61eb5\") " pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.233305 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp" event={"ID":"300e57fc-d2df-468a-8c02-0bff21cd53c1","Type":"ContainerStarted","Data":"4d1a89d186832869e0f8a9eb9a7499532b4d348b5cea1ed740d283803734a7a3"}
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.237739 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.246555 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.249040 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.250198 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-86lzz"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.251360 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.269209 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.271503 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: W0930 21:00:40.325818 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0dde18df_d1bd_4b36_82af_cd0967cd942b.slice/crio-b9c0ae4e010e66e3e4596ba3275d56cc037b8ba007490f633731e62d6b90c1a1 WatchSource:0}: Error finding container b9c0ae4e010e66e3e4596ba3275d56cc037b8ba007490f633731e62d6b90c1a1: Status 404 returned error can't find the container with id b9c0ae4e010e66e3e4596ba3275d56cc037b8ba007490f633731e62d6b90c1a1
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.332272 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.332966 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-scripts\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.333109 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-config-data\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.333292 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-log-httpd\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.333498 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vww6g\" (UniqueName: \"kubernetes.io/projected/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-kube-api-access-vww6g\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.333615 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-run-httpd\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.333740 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.344785 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"]
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.435419 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-scripts\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.435477 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-config-data\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.435514 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-log-httpd\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.435588 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vww6g\" (UniqueName: \"kubernetes.io/projected/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-kube-api-access-vww6g\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.435614 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-run-httpd\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.435637 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.436655 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-log-httpd\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.441041 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-scripts\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.441759 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0"
\"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.441889 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-run-httpd\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.445536 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-config-data\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.459891 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vww6g\" (UniqueName: \"kubernetes.io/projected/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-kube-api-access-vww6g\") pod \"ceilometer-0\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " pod="openstack/ceilometer-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.589308 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.687932 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.824336 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Sep 30 21:00:40 crc kubenswrapper[4919]: W0930 21:00:40.837949 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod871d1c5f_b69f_44c9_88c0_f72c11d61eb5.slice/crio-a60e18a7635cecc75137b86d3fa9d1e6c1a74c714d4bf413011a371452ff8967 WatchSource:0}: Error finding container a60e18a7635cecc75137b86d3fa9d1e6c1a74c714d4bf413011a371452ff8967: Status 404 returned error can't find the container with id a60e18a7635cecc75137b86d3fa9d1e6c1a74c714d4bf413011a371452ff8967 Sep 30 21:00:40 crc kubenswrapper[4919]: I0930 21:00:40.931851 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Sep 30 21:00:40 crc kubenswrapper[4919]: W0930 21:00:40.935115 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85f27421_a520_4043_b8d5_7729b07a0bed.slice/crio-3076ede594c943d3a0b9e09049bb4d0e3f2a6053b553bc170337d870bd6dfd47 WatchSource:0}: Error finding container 3076ede594c943d3a0b9e09049bb4d0e3f2a6053b553bc170337d870bd6dfd47: Status 404 returned error can't find the container with id 3076ede594c943d3a0b9e09049bb4d0e3f2a6053b553bc170337d870bd6dfd47 Sep 30 21:00:41 crc kubenswrapper[4919]: W0930 21:00:41.090314 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bfa6a29_fbaa_43d1_9d10_dcd440f5a367.slice/crio-3922a09ca5ae95614de42d3f06df8d0024ff39b73527313604e41d1e9dc0638a WatchSource:0}: Error finding container 3922a09ca5ae95614de42d3f06df8d0024ff39b73527313604e41d1e9dc0638a: Status 404 returned error can't find the container with id 3922a09ca5ae95614de42d3f06df8d0024ff39b73527313604e41d1e9dc0638a Sep 30 21:00:41 crc kubenswrapper[4919]: I0930 21:00:41.095658 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ceilometer-0"] Sep 30 21:00:41 crc kubenswrapper[4919]: I0930 21:00:41.164930 4919 scope.go:117] "RemoveContainer" containerID="408b54bc5e3dcd322bbbee70f478dbfdc1358b8065f2a123198fa1e5cf5ca212" Sep 30 21:00:41 crc kubenswrapper[4919]: I0930 21:00:41.245327 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerStarted","Data":"b9c0ae4e010e66e3e4596ba3275d56cc037b8ba007490f633731e62d6b90c1a1"} Sep 30 21:00:41 crc kubenswrapper[4919]: I0930 21:00:41.247831 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerStarted","Data":"3922a09ca5ae95614de42d3f06df8d0024ff39b73527313604e41d1e9dc0638a"} Sep 30 21:00:41 crc kubenswrapper[4919]: I0930 21:00:41.249786 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerStarted","Data":"345a7e34fd8dfdc58789ba9d0b9fcb3c2de20f368701db3c1559623aaf734cf8"} Sep 30 21:00:41 crc kubenswrapper[4919]: I0930 21:00:41.251338 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"871d1c5f-b69f-44c9-88c0-f72c11d61eb5","Type":"ContainerStarted","Data":"a60e18a7635cecc75137b86d3fa9d1e6c1a74c714d4bf413011a371452ff8967"} Sep 30 21:00:41 crc kubenswrapper[4919]: I0930 21:00:41.252613 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"85f27421-a520-4043-b8d5-7729b07a0bed","Type":"ContainerStarted","Data":"3076ede594c943d3a0b9e09049bb4d0e3f2a6053b553bc170337d870bd6dfd47"} Sep 30 21:00:41 crc kubenswrapper[4919]: I0930 21:00:41.261570 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"0c6a0e4a-b52a-4312-a36e-94c6f709200a","Type":"ContainerStarted","Data":"17081e4bad66b259bc100852c4258bcd5df52a5dc2c09a12f11aa40ce447ff6c"} Sep 30 21:00:42 crc kubenswrapper[4919]: I0930 21:00:42.308689 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerStarted","Data":"daeecf66698be80fe3c7574c410c95c80277fe3e8c672e767f6cfd54c4122920"} Sep 30 21:00:43 crc kubenswrapper[4919]: I0930 21:00:43.340698 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerStarted","Data":"09af640df83d0ef1807067dc43c011a841ea3c7acf0e768f58ab60c4c08fc840"} Sep 30 21:00:45 crc kubenswrapper[4919]: I0930 21:00:45.513782 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-fb7a-account-create-js49h"] Sep 30 21:00:45 crc kubenswrapper[4919]: I0930 21:00:45.520309 4919 util.go:30] "No sandbox for pod can be found. 
Sep 30 21:00:45 crc kubenswrapper[4919]: I0930 21:00:45.523298 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret"
Sep 30 21:00:45 crc kubenswrapper[4919]: I0930 21:00:45.525079 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-fb7a-account-create-js49h"]
Sep 30 21:00:45 crc kubenswrapper[4919]: I0930 21:00:45.569504 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmlsx\" (UniqueName: \"kubernetes.io/projected/c3f9fc61-e4ed-4943-a0a3-f152aa21d724-kube-api-access-dmlsx\") pod \"cloudkitty-fb7a-account-create-js49h\" (UID: \"c3f9fc61-e4ed-4943-a0a3-f152aa21d724\") " pod="openstack/cloudkitty-fb7a-account-create-js49h"
Sep 30 21:00:45 crc kubenswrapper[4919]: I0930 21:00:45.671955 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmlsx\" (UniqueName: \"kubernetes.io/projected/c3f9fc61-e4ed-4943-a0a3-f152aa21d724-kube-api-access-dmlsx\") pod \"cloudkitty-fb7a-account-create-js49h\" (UID: \"c3f9fc61-e4ed-4943-a0a3-f152aa21d724\") " pod="openstack/cloudkitty-fb7a-account-create-js49h"
Sep 30 21:00:45 crc kubenswrapper[4919]: I0930 21:00:45.711251 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmlsx\" (UniqueName: \"kubernetes.io/projected/c3f9fc61-e4ed-4943-a0a3-f152aa21d724-kube-api-access-dmlsx\") pod \"cloudkitty-fb7a-account-create-js49h\" (UID: \"c3f9fc61-e4ed-4943-a0a3-f152aa21d724\") " pod="openstack/cloudkitty-fb7a-account-create-js49h"
Sep 30 21:00:45 crc kubenswrapper[4919]: I0930 21:00:45.845307 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-fb7a-account-create-js49h"
Sep 30 21:00:46 crc kubenswrapper[4919]: I0930 21:00:46.376627 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp" event={"ID":"300e57fc-d2df-468a-8c02-0bff21cd53c1","Type":"ContainerStarted","Data":"6d1f9c17af96555cb537ad60edab8a8a9321a3a3a7a22815c7cdcfb94a477b5d"}
Sep 30 21:00:46 crc kubenswrapper[4919]: I0930 21:00:46.377009 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp"
Sep 30 21:00:46 crc kubenswrapper[4919]: I0930 21:00:46.383896 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"0c6a0e4a-b52a-4312-a36e-94c6f709200a","Type":"ContainerStarted","Data":"079a03b232c298030ce19e7254bb5678edcc44a8762bde202bb33b8fc7defbe5"}
Sep 30 21:00:46 crc kubenswrapper[4919]: I0930 21:00:46.384449 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-compactor-0"
Sep 30 21:00:46 crc kubenswrapper[4919]: I0930 21:00:46.394254 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp" podStartSLOduration=2.666072707 podStartE2EDuration="8.394221029s" podCreationTimestamp="2025-09-30 21:00:38 +0000 UTC" firstStartedPulling="2025-09-30 21:00:40.000418975 +0000 UTC m=+2825.116452102" lastFinishedPulling="2025-09-30 21:00:45.728567297 +0000 UTC m=+2830.844600424" observedRunningTime="2025-09-30 21:00:46.392493719 +0000 UTC m=+2831.508526846" watchObservedRunningTime="2025-09-30 21:00:46.394221029 +0000 UTC m=+2831.510254156"
Sep 30 21:00:46 crc kubenswrapper[4919]: I0930 21:00:46.416588 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-compactor-0" podStartSLOduration=3.409309462 podStartE2EDuration="8.416571405s" podCreationTimestamp="2025-09-30 21:00:38 +0000 UTC" firstStartedPulling="2025-09-30 21:00:40.728906474 +0000 UTC m=+2825.844939601" lastFinishedPulling="2025-09-30 21:00:45.736168427 +0000 UTC m=+2830.852201544" observedRunningTime="2025-09-30 21:00:46.414251258 +0000 UTC m=+2831.530284395" watchObservedRunningTime="2025-09-30 21:00:46.416571405 +0000 UTC m=+2831.532604532"
Sep 30 21:00:46 crc kubenswrapper[4919]: I0930 21:00:46.488954 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-fb7a-account-create-js49h"]
Sep 30 21:00:46 crc kubenswrapper[4919]: W0930 21:00:46.524087 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3f9fc61_e4ed_4943_a0a3_f152aa21d724.slice/crio-7661931f9bc4fdac4d7a933f33dd22307a1257bd12ba95eadd8066066088a478 WatchSource:0}: Error finding container 7661931f9bc4fdac4d7a933f33dd22307a1257bd12ba95eadd8066066088a478: Status 404 returned error can't find the container with id 7661931f9bc4fdac4d7a933f33dd22307a1257bd12ba95eadd8066066088a478
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.394281 4919 generic.go:334] "Generic (PLEG): container finished" podID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerID="5ab229daaaedb62f62edff11b88dd84a698159f265bcca6975a4f94446565e3d" exitCode=2
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.394471 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerDied","Data":"5ab229daaaedb62f62edff11b88dd84a698159f265bcca6975a4f94446565e3d"}
event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerDied","Data":"5ab229daaaedb62f62edff11b88dd84a698159f265bcca6975a4f94446565e3d"} Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.394977 4919 scope.go:117] "RemoveContainer" containerID="5ab229daaaedb62f62edff11b88dd84a698159f265bcca6975a4f94446565e3d" Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.399324 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"871d1c5f-b69f-44c9-88c0-f72c11d61eb5","Type":"ContainerStarted","Data":"5e02105419d9c786bf2ac92d80d0b2ca30b89ce9a6b08ebab5e7bd279f78c445"} Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.399672 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-index-gateway-0" Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.422863 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7" event={"ID":"182374fe-7fd0-4267-b938-396ef9eabd7f","Type":"ContainerStarted","Data":"017a290db877680520595a3c3bd74462bfd57e0b2ca0eb5ba6b41ab4ed147eac"} Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.422990 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7" Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.433660 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"85f27421-a520-4043-b8d5-7729b07a0bed","Type":"ContainerStarted","Data":"bfc6e26d81f3212767f16adc7aa3dea06487399ffd949403277a6f2b08d64c84"} Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.434438 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.437162 4919 generic.go:334] "Generic (PLEG): container finished" podID="0dde18df-d1bd-4b36-82af-cd0967cd942b" containerID="7c9792e8712884e019d11f3f5a2c0e3eb87d2ed1fd9867f7088e06ce4a240788" exitCode=2 Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.438732 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerDied","Data":"7c9792e8712884e019d11f3f5a2c0e3eb87d2ed1fd9867f7088e06ce4a240788"} Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.439361 4919 scope.go:117] "RemoveContainer" containerID="7c9792e8712884e019d11f3f5a2c0e3eb87d2ed1fd9867f7088e06ce4a240788" Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.445908 4919 generic.go:334] "Generic (PLEG): container finished" podID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerID="c5d657b9eb602bc2d9f52e8154000efd168e291efa717af3f7669b33d5ba8c3c" exitCode=0 Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.446023 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb" event={"ID":"77efc011-2683-4ff8-80f9-be0b81c8c7f4","Type":"ContainerDied","Data":"c5d657b9eb602bc2d9f52e8154000efd168e291efa717af3f7669b33d5ba8c3c"} Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.449674 4919 generic.go:334] "Generic (PLEG): container finished" podID="c3f9fc61-e4ed-4943-a0a3-f152aa21d724" containerID="2e663a6a112046231ebdd0007687a7ff1a11f7c082fb453719fffe9970e213fd" exitCode=0 Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.449767 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-fb7a-account-create-js49h" event={"ID":"c3f9fc61-e4ed-4943-a0a3-f152aa21d724","Type":"ContainerStarted","Data":"7661931f9bc4fdac4d7a933f33dd22307a1257bd12ba95eadd8066066088a478"}
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.453061 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd" event={"ID":"8a614d9b-d891-48aa-9a64-d6b5187a8f73","Type":"ContainerStarted","Data":"39e0399235f78751d3e75d3891c6c48bd76107c321667a4de0f12fad7577ba5a"}
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.453195 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd"
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.460482 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7" podStartSLOduration=3.316767185 podStartE2EDuration="9.460452935s" podCreationTimestamp="2025-09-30 21:00:38 +0000 UTC" firstStartedPulling="2025-09-30 21:00:39.699662647 +0000 UTC m=+2824.815695774" lastFinishedPulling="2025-09-30 21:00:45.843348397 +0000 UTC m=+2830.959381524" observedRunningTime="2025-09-30 21:00:47.443905007 +0000 UTC m=+2832.559938134" watchObservedRunningTime="2025-09-30 21:00:47.460452935 +0000 UTC m=+2832.576486062"
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.475248 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerStarted","Data":"6e4704d4dc34d5bad4423ee87075b553bebf0a480670ced4b26e894d4cfc2781"}
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.495248 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-index-gateway-0" podStartSLOduration=4.537638604 podStartE2EDuration="9.495215071s" podCreationTimestamp="2025-09-30 21:00:38 +0000 UTC" firstStartedPulling="2025-09-30 21:00:40.840158841 +0000 UTC m=+2825.956191968" lastFinishedPulling="2025-09-30 21:00:45.797735308 +0000 UTC m=+2830.913768435" observedRunningTime="2025-09-30 21:00:47.462468464 +0000 UTC m=+2832.578501601" watchObservedRunningTime="2025-09-30 21:00:47.495215071 +0000 UTC m=+2832.611248198"
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.525807 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-ingester-0" podStartSLOduration=4.6225132890000005 podStartE2EDuration="9.525786045s" podCreationTimestamp="2025-09-30 21:00:38 +0000 UTC" firstStartedPulling="2025-09-30 21:00:40.937871047 +0000 UTC m=+2826.053904174" lastFinishedPulling="2025-09-30 21:00:45.841143803 +0000 UTC m=+2830.957176930" observedRunningTime="2025-09-30 21:00:47.502416739 +0000 UTC m=+2832.618449876" watchObservedRunningTime="2025-09-30 21:00:47.525786045 +0000 UTC m=+2832.641819172"
Sep 30 21:00:47 crc kubenswrapper[4919]: I0930 21:00:47.558110 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd" podStartSLOduration=3.676484668 podStartE2EDuration="9.558091049s" podCreationTimestamp="2025-09-30 21:00:38 +0000 UTC" firstStartedPulling="2025-09-30 21:00:39.958831072 +0000 UTC m=+2825.074864199" lastFinishedPulling="2025-09-30 21:00:45.840437453 +0000 UTC m=+2830.956470580" observedRunningTime="2025-09-30 21:00:47.554944538 +0000 UTC m=+2832.670977655" watchObservedRunningTime="2025-09-30 21:00:47.558091049 +0000 UTC m=+2832.674124176"
pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd" podStartSLOduration=3.676484668 podStartE2EDuration="9.558091049s" podCreationTimestamp="2025-09-30 21:00:38 +0000 UTC" firstStartedPulling="2025-09-30 21:00:39.958831072 +0000 UTC m=+2825.074864199" lastFinishedPulling="2025-09-30 21:00:45.840437453 +0000 UTC m=+2830.956470580" observedRunningTime="2025-09-30 21:00:47.554944538 +0000 UTC m=+2832.670977655" watchObservedRunningTime="2025-09-30 21:00:47.558091049 +0000 UTC m=+2832.674124176" Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.490978 4919 generic.go:334] "Generic (PLEG): container finished" podID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerID="38070fa3144a0ca2c52045199b5800187072f3f1c69e8f06483a7455eb4908dc" exitCode=2 Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.491148 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerDied","Data":"38070fa3144a0ca2c52045199b5800187072f3f1c69e8f06483a7455eb4908dc"} Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.491607 4919 scope.go:117] "RemoveContainer" containerID="5ab229daaaedb62f62edff11b88dd84a698159f265bcca6975a4f94446565e3d" Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.491685 4919 scope.go:117] "RemoveContainer" containerID="38070fa3144a0ca2c52045199b5800187072f3f1c69e8f06483a7455eb4908dc" Sep 30 21:00:48 crc kubenswrapper[4919]: E0930 21:00:48.491991 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 10s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.497043 4919 generic.go:334] "Generic (PLEG): container finished" podID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerID="4aae6fa4171eca5623f5807800c87f4e1d27550eebb10e9869bf38282fd377eb" exitCode=0 Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.497106 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb" event={"ID":"77efc011-2683-4ff8-80f9-be0b81c8c7f4","Type":"ContainerDied","Data":"4aae6fa4171eca5623f5807800c87f4e1d27550eebb10e9869bf38282fd377eb"} Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.499796 4919 generic.go:334] "Generic (PLEG): container finished" podID="0dde18df-d1bd-4b36-82af-cd0967cd942b" containerID="839340cc85bca6179da491dd7180e9b96c654e8e84932bcb366504bd12ba4a4d" exitCode=2 Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.501285 4919 scope.go:117] "RemoveContainer" containerID="839340cc85bca6179da491dd7180e9b96c654e8e84932bcb366504bd12ba4a4d" Sep 30 21:00:48 crc kubenswrapper[4919]: E0930 21:00:48.501511 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 10s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:00:48 crc kubenswrapper[4919]: I0930 21:00:48.501758 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerDied","Data":"839340cc85bca6179da491dd7180e9b96c654e8e84932bcb366504bd12ba4a4d"} Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.029100 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-fb7a-account-create-js49h" Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.037518 4919 scope.go:117] "RemoveContainer" containerID="7c9792e8712884e019d11f3f5a2c0e3eb87d2ed1fd9867f7088e06ce4a240788" Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.164283 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmlsx\" (UniqueName: \"kubernetes.io/projected/c3f9fc61-e4ed-4943-a0a3-f152aa21d724-kube-api-access-dmlsx\") pod \"c3f9fc61-e4ed-4943-a0a3-f152aa21d724\" (UID: \"c3f9fc61-e4ed-4943-a0a3-f152aa21d724\") " Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.184783 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3f9fc61-e4ed-4943-a0a3-f152aa21d724-kube-api-access-dmlsx" (OuterVolumeSpecName: "kube-api-access-dmlsx") pod "c3f9fc61-e4ed-4943-a0a3-f152aa21d724" (UID: "c3f9fc61-e4ed-4943-a0a3-f152aa21d724"). InnerVolumeSpecName "kube-api-access-dmlsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.266340 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmlsx\" (UniqueName: \"kubernetes.io/projected/c3f9fc61-e4ed-4943-a0a3-f152aa21d724-kube-api-access-dmlsx\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.424015 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.511755 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-fb7a-account-create-js49h" event={"ID":"c3f9fc61-e4ed-4943-a0a3-f152aa21d724","Type":"ContainerDied","Data":"7661931f9bc4fdac4d7a933f33dd22307a1257bd12ba95eadd8066066088a478"} Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.512021 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7661931f9bc4fdac4d7a933f33dd22307a1257bd12ba95eadd8066066088a478" Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.511776 4919 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.513812 4919 scope.go:117] "RemoveContainer" containerID="839340cc85bca6179da491dd7180e9b96c654e8e84932bcb366504bd12ba4a4d"
Sep 30 21:00:49 crc kubenswrapper[4919]: E0930 21:00:49.514330 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 10s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b"
Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.514837 4919 scope.go:117] "RemoveContainer" containerID="38070fa3144a0ca2c52045199b5800187072f3f1c69e8f06483a7455eb4908dc"
Sep 30 21:00:49 crc kubenswrapper[4919]: E0930 21:00:49.515165 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 10s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3"
Sep 30 21:00:49 crc kubenswrapper[4919]: I0930 21:00:49.592088 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r"
Sep 30 21:00:49 crc kubenswrapper[4919]: E0930 21:00:49.710496 4919 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3f9fc61_e4ed_4943_a0a3_f152aa21d724.slice\": RecentStats: unable to find data in memory cache]"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.529792 4919 scope.go:117] "RemoveContainer" containerID="839340cc85bca6179da491dd7180e9b96c654e8e84932bcb366504bd12ba4a4d"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.530121 4919 scope.go:117] "RemoveContainer" containerID="38070fa3144a0ca2c52045199b5800187072f3f1c69e8f06483a7455eb4908dc"
Sep 30 21:00:50 crc kubenswrapper[4919]: E0930 21:00:50.530397 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 10s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b"
Sep 30 21:00:50 crc kubenswrapper[4919]: E0930 21:00:50.530432 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 10s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.673747 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-dw2jq"]
Sep 30 21:00:50 crc kubenswrapper[4919]: E0930 21:00:50.674372 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3f9fc61-e4ed-4943-a0a3-f152aa21d724" containerName="mariadb-account-create"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.674431 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3f9fc61-e4ed-4943-a0a3-f152aa21d724" containerName="mariadb-account-create"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.675461 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3f9fc61-e4ed-4943-a0a3-f152aa21d724" containerName="mariadb-account-create"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.676168 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-dw2jq"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.678477 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.678715 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.679400 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-m6hm4"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.679871 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.696190 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-dw2jq"]
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.801237 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-config-data\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.801310 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-certs\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.801346 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbmdg\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-kube-api-access-mbmdg\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.801369 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-combined-ca-bundle\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.801403 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-scripts\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq"
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.903511 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-config-data\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq"
\"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-config-data\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.903580 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-certs\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.903622 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbmdg\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-kube-api-access-mbmdg\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.903649 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-combined-ca-bundle\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.903688 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-scripts\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.909683 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-scripts\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.910405 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-certs\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.913036 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-config-data\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.913116 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-combined-ca-bundle\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.924098 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbmdg\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-kube-api-access-mbmdg\") pod \"cloudkitty-db-sync-dw2jq\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 
Sep 30 21:00:50 crc kubenswrapper[4919]: I0930 21:00:50.995706 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-dw2jq"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.141052 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g6qqk"]
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.144058 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.168046 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g6qqk"]
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.312645 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-utilities\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.312735 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr29s\" (UniqueName: \"kubernetes.io/projected/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-kube-api-access-vr29s\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.312814 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-catalog-content\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.414289 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-catalog-content\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.414671 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-utilities\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.414713 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr29s\" (UniqueName: \"kubernetes.io/projected/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-kube-api-access-vr29s\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.414877 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-catalog-content\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.415095 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-utilities\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.438254 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr29s\" (UniqueName: \"kubernetes.io/projected/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-kube-api-access-vr29s\") pod \"certified-operators-g6qqk\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") " pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:51 crc kubenswrapper[4919]: I0930 21:00:51.463678 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.290639 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb"
Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.435512 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-util\") pod \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") "
Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.435918 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsrlv\" (UniqueName: \"kubernetes.io/projected/77efc011-2683-4ff8-80f9-be0b81c8c7f4-kube-api-access-qsrlv\") pod \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") "
Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.436185 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-bundle\") pod \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\" (UID: \"77efc011-2683-4ff8-80f9-be0b81c8c7f4\") "
Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.437995 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-bundle" (OuterVolumeSpecName: "bundle") pod "77efc011-2683-4ff8-80f9-be0b81c8c7f4" (UID: "77efc011-2683-4ff8-80f9-be0b81c8c7f4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.441199 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77efc011-2683-4ff8-80f9-be0b81c8c7f4-kube-api-access-qsrlv" (OuterVolumeSpecName: "kube-api-access-qsrlv") pod "77efc011-2683-4ff8-80f9-be0b81c8c7f4" (UID: "77efc011-2683-4ff8-80f9-be0b81c8c7f4"). InnerVolumeSpecName "kube-api-access-qsrlv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.446937 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-util" (OuterVolumeSpecName: "util") pod "77efc011-2683-4ff8-80f9-be0b81c8c7f4" (UID: "77efc011-2683-4ff8-80f9-be0b81c8c7f4"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.538437 4919 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.538464 4919 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/77efc011-2683-4ff8-80f9-be0b81c8c7f4-util\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.538474 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsrlv\" (UniqueName: \"kubernetes.io/projected/77efc011-2683-4ff8-80f9-be0b81c8c7f4-kube-api-access-qsrlv\") on node \"crc\" DevicePath \"\"" Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.549699 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb" event={"ID":"77efc011-2683-4ff8-80f9-be0b81c8c7f4","Type":"ContainerDied","Data":"82870f700eaa8e09be3e89b2458ce9a3426a6ad098f22b430a67d2778408d5c5"} Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.549740 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82870f700eaa8e09be3e89b2458ce9a3426a6ad098f22b430a67d2778408d5c5" Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.549817 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb" Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.603829 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g6qqk"] Sep 30 21:00:52 crc kubenswrapper[4919]: W0930 21:00:52.607436 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6ff1fff_89a9_42b1_98d1_f61f97e78a51.slice/crio-daf9750daa144d84ca2ead2b459453f0598a600aa5647891a918fe1fe83f5296 WatchSource:0}: Error finding container daf9750daa144d84ca2ead2b459453f0598a600aa5647891a918fe1fe83f5296: Status 404 returned error can't find the container with id daf9750daa144d84ca2ead2b459453f0598a600aa5647891a918fe1fe83f5296 Sep 30 21:00:52 crc kubenswrapper[4919]: I0930 21:00:52.716637 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-dw2jq"] Sep 30 21:00:53 crc kubenswrapper[4919]: I0930 21:00:53.563805 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-dw2jq" event={"ID":"f781d002-f3f5-43e1-863c-ceb1fd87ec79","Type":"ContainerStarted","Data":"28bd30131c3b2c881aa2ab00b72d23f0a350aaa38a4a669d5940c410add08de3"} Sep 30 21:00:53 crc kubenswrapper[4919]: I0930 21:00:53.565936 4919 generic.go:334] "Generic (PLEG): container finished" podID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerID="aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f" exitCode=0 Sep 30 21:00:53 crc kubenswrapper[4919]: I0930 21:00:53.566105 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6qqk" event={"ID":"a6ff1fff-89a9-42b1-98d1-f61f97e78a51","Type":"ContainerDied","Data":"aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f"} Sep 30 21:00:53 crc kubenswrapper[4919]: I0930 21:00:53.566300 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-g6qqk" event={"ID":"a6ff1fff-89a9-42b1-98d1-f61f97e78a51","Type":"ContainerStarted","Data":"daf9750daa144d84ca2ead2b459453f0598a600aa5647891a918fe1fe83f5296"} Sep 30 21:00:53 crc kubenswrapper[4919]: I0930 21:00:53.577163 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerStarted","Data":"eec54927e84a35714bf5b672947e80c2c5b4256f5745ec9e62afd0edb8252af7"} Sep 30 21:00:53 crc kubenswrapper[4919]: I0930 21:00:53.578989 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 21:00:53 crc kubenswrapper[4919]: I0930 21:00:53.637463 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.3571100830000002 podStartE2EDuration="13.637439598s" podCreationTimestamp="2025-09-30 21:00:40 +0000 UTC" firstStartedPulling="2025-09-30 21:00:41.092710845 +0000 UTC m=+2826.208743972" lastFinishedPulling="2025-09-30 21:00:52.37304036 +0000 UTC m=+2837.489073487" observedRunningTime="2025-09-30 21:00:53.625647367 +0000 UTC m=+2838.741680494" watchObservedRunningTime="2025-09-30 21:00:53.637439598 +0000 UTC m=+2838.753472735" Sep 30 21:00:56 crc kubenswrapper[4919]: I0930 21:00:56.645935 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6qqk" event={"ID":"a6ff1fff-89a9-42b1-98d1-f61f97e78a51","Type":"ContainerStarted","Data":"6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8"} Sep 30 21:00:57 crc kubenswrapper[4919]: I0930 21:00:57.657824 4919 generic.go:334] "Generic (PLEG): container finished" podID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerID="6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8" exitCode=0 Sep 30 21:00:57 crc kubenswrapper[4919]: I0930 21:00:57.658089 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6qqk" event={"ID":"a6ff1fff-89a9-42b1-98d1-f61f97e78a51","Type":"ContainerDied","Data":"6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8"} Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.146873 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29321101-km2h6"] Sep 30 21:01:00 crc kubenswrapper[4919]: E0930 21:01:00.147798 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerName="util" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.147811 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerName="util" Sep 30 21:01:00 crc kubenswrapper[4919]: E0930 21:01:00.147824 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerName="extract" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.147830 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerName="extract" Sep 30 21:01:00 crc kubenswrapper[4919]: E0930 21:01:00.147846 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerName="pull" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.147852 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerName="pull" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.148047 4919 
memory_manager.go:354] "RemoveStaleState removing state" podUID="77efc011-2683-4ff8-80f9-be0b81c8c7f4" containerName="extract" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.148835 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.166496 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321101-km2h6"] Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.233760 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-combined-ca-bundle\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.233826 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-config-data\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.233883 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jfdb\" (UniqueName: \"kubernetes.io/projected/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-kube-api-access-2jfdb\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.233952 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-fernet-keys\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.336058 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-combined-ca-bundle\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.336127 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-config-data\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.336154 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jfdb\" (UniqueName: \"kubernetes.io/projected/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-kube-api-access-2jfdb\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.336243 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-fernet-keys\") pod 
\"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.342155 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-combined-ca-bundle\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.342708 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-config-data\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.342755 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-fernet-keys\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.355812 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jfdb\" (UniqueName: \"kubernetes.io/projected/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-kube-api-access-2jfdb\") pod \"keystone-cron-29321101-km2h6\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:00 crc kubenswrapper[4919]: I0930 21:01:00.480833 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:01 crc kubenswrapper[4919]: I0930 21:01:01.635087 4919 scope.go:117] "RemoveContainer" containerID="839340cc85bca6179da491dd7180e9b96c654e8e84932bcb366504bd12ba4a4d" Sep 30 21:01:01 crc kubenswrapper[4919]: I0930 21:01:01.968446 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f"] Sep 30 21:01:01 crc kubenswrapper[4919]: I0930 21:01:01.970070 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f" Sep 30 21:01:01 crc kubenswrapper[4919]: I0930 21:01:01.971653 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-m6kf2" Sep 30 21:01:01 crc kubenswrapper[4919]: I0930 21:01:01.972282 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Sep 30 21:01:01 crc kubenswrapper[4919]: I0930 21:01:01.977071 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Sep 30 21:01:01 crc kubenswrapper[4919]: I0930 21:01:01.993290 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.076681 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr5bf\" (UniqueName: \"kubernetes.io/projected/4018daf1-fc20-4051-86f6-515140b17020-kube-api-access-wr5bf\") pod \"obo-prometheus-operator-7c8cf85677-qbk5f\" (UID: \"4018daf1-fc20-4051-86f6-515140b17020\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.088153 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.089733 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.093154 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.093607 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-fffd6" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.105333 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.119785 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.121151 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.173315 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.178902 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr5bf\" (UniqueName: \"kubernetes.io/projected/4018daf1-fc20-4051-86f6-515140b17020-kube-api-access-wr5bf\") pod \"obo-prometheus-operator-7c8cf85677-qbk5f\" (UID: \"4018daf1-fc20-4051-86f6-515140b17020\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.179007 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b530a7bf-2e7a-4396-9b34-38ae127ca22e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz\" (UID: \"b530a7bf-2e7a-4396-9b34-38ae127ca22e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.179057 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b530a7bf-2e7a-4396-9b34-38ae127ca22e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz\" (UID: \"b530a7bf-2e7a-4396-9b34-38ae127ca22e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.218384 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr5bf\" (UniqueName: \"kubernetes.io/projected/4018daf1-fc20-4051-86f6-515140b17020-kube-api-access-wr5bf\") pod \"obo-prometheus-operator-7c8cf85677-qbk5f\" (UID: \"4018daf1-fc20-4051-86f6-515140b17020\") " pod="openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.280422 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b530a7bf-2e7a-4396-9b34-38ae127ca22e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz\" (UID: \"b530a7bf-2e7a-4396-9b34-38ae127ca22e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.280488 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b530a7bf-2e7a-4396-9b34-38ae127ca22e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz\" (UID: \"b530a7bf-2e7a-4396-9b34-38ae127ca22e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.280531 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4e6b1904-0dbb-41d6-8345-5e71f57442e2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx\" (UID: \"4e6b1904-0dbb-41d6-8345-5e71f57442e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" Sep 30 21:01:02 crc kubenswrapper[4919]: 
I0930 21:01:02.280596 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4e6b1904-0dbb-41d6-8345-5e71f57442e2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx\" (UID: \"4e6b1904-0dbb-41d6-8345-5e71f57442e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.287082 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b530a7bf-2e7a-4396-9b34-38ae127ca22e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz\" (UID: \"b530a7bf-2e7a-4396-9b34-38ae127ca22e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.290477 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b530a7bf-2e7a-4396-9b34-38ae127ca22e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz\" (UID: \"b530a7bf-2e7a-4396-9b34-38ae127ca22e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.305666 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.311504 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-sbrnf"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.312815 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.316769 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-49pt6" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.317092 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.336438 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-sbrnf"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.385474 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ada97c0d-8672-4535-a82d-aeb57a2b192d-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-sbrnf\" (UID: \"ada97c0d-8672-4535-a82d-aeb57a2b192d\") " pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.385547 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4e6b1904-0dbb-41d6-8345-5e71f57442e2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx\" (UID: \"4e6b1904-0dbb-41d6-8345-5e71f57442e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.385576 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp87f\" (UniqueName: \"kubernetes.io/projected/ada97c0d-8672-4535-a82d-aeb57a2b192d-kube-api-access-tp87f\") pod \"observability-operator-cc5f78dfc-sbrnf\" (UID: \"ada97c0d-8672-4535-a82d-aeb57a2b192d\") " pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.385635 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4e6b1904-0dbb-41d6-8345-5e71f57442e2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx\" (UID: \"4e6b1904-0dbb-41d6-8345-5e71f57442e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.394747 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4e6b1904-0dbb-41d6-8345-5e71f57442e2-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx\" (UID: \"4e6b1904-0dbb-41d6-8345-5e71f57442e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.396606 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4e6b1904-0dbb-41d6-8345-5e71f57442e2-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx\" (UID: \"4e6b1904-0dbb-41d6-8345-5e71f57442e2\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.445489 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.456093 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.487462 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ada97c0d-8672-4535-a82d-aeb57a2b192d-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-sbrnf\" (UID: \"ada97c0d-8672-4535-a82d-aeb57a2b192d\") " pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.487529 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp87f\" (UniqueName: \"kubernetes.io/projected/ada97c0d-8672-4535-a82d-aeb57a2b192d-kube-api-access-tp87f\") pod \"observability-operator-cc5f78dfc-sbrnf\" (UID: \"ada97c0d-8672-4535-a82d-aeb57a2b192d\") " pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.491323 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ada97c0d-8672-4535-a82d-aeb57a2b192d-observability-operator-tls\") pod \"observability-operator-cc5f78dfc-sbrnf\" (UID: \"ada97c0d-8672-4535-a82d-aeb57a2b192d\") " pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.517122 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp87f\" (UniqueName: \"kubernetes.io/projected/ada97c0d-8672-4535-a82d-aeb57a2b192d-kube-api-access-tp87f\") pod \"observability-operator-cc5f78dfc-sbrnf\" (UID: \"ada97c0d-8672-4535-a82d-aeb57a2b192d\") " pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.525286 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-222zn"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.526990 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.531331 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-t8w9m" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.539920 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-222zn"] Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.665872 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.691408 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1701a4b5-dbbc-41a2-96ae-cc483f69e8b9-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-222zn\" (UID: \"1701a4b5-dbbc-41a2-96ae-cc483f69e8b9\") " pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.691462 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtzxw\" (UniqueName: \"kubernetes.io/projected/1701a4b5-dbbc-41a2-96ae-cc483f69e8b9-kube-api-access-mtzxw\") pod \"perses-operator-54bc95c9fb-222zn\" (UID: \"1701a4b5-dbbc-41a2-96ae-cc483f69e8b9\") " pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.793165 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1701a4b5-dbbc-41a2-96ae-cc483f69e8b9-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-222zn\" (UID: \"1701a4b5-dbbc-41a2-96ae-cc483f69e8b9\") " pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.793208 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtzxw\" (UniqueName: \"kubernetes.io/projected/1701a4b5-dbbc-41a2-96ae-cc483f69e8b9-kube-api-access-mtzxw\") pod \"perses-operator-54bc95c9fb-222zn\" (UID: \"1701a4b5-dbbc-41a2-96ae-cc483f69e8b9\") " pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.794959 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/1701a4b5-dbbc-41a2-96ae-cc483f69e8b9-openshift-service-ca\") pod \"perses-operator-54bc95c9fb-222zn\" (UID: \"1701a4b5-dbbc-41a2-96ae-cc483f69e8b9\") " pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.831737 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtzxw\" (UniqueName: \"kubernetes.io/projected/1701a4b5-dbbc-41a2-96ae-cc483f69e8b9-kube-api-access-mtzxw\") pod \"perses-operator-54bc95c9fb-222zn\" (UID: \"1701a4b5-dbbc-41a2-96ae-cc483f69e8b9\") " pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:02 crc kubenswrapper[4919]: I0930 21:01:02.858675 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:05 crc kubenswrapper[4919]: I0930 21:01:05.640332 4919 scope.go:117] "RemoveContainer" containerID="38070fa3144a0ca2c52045199b5800187072f3f1c69e8f06483a7455eb4908dc" Sep 30 21:01:06 crc kubenswrapper[4919]: I0930 21:01:06.769356 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6qqk" event={"ID":"a6ff1fff-89a9-42b1-98d1-f61f97e78a51","Type":"ContainerStarted","Data":"95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce"} Sep 30 21:01:06 crc kubenswrapper[4919]: I0930 21:01:06.806454 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g6qqk" podStartSLOduration=11.215411619 podStartE2EDuration="15.806431145s" podCreationTimestamp="2025-09-30 21:00:51 +0000 UTC" firstStartedPulling="2025-09-30 21:00:53.567802884 +0000 UTC m=+2838.683836011" lastFinishedPulling="2025-09-30 21:00:58.15882241 +0000 UTC m=+2843.274855537" observedRunningTime="2025-09-30 21:01:06.800358639 +0000 UTC m=+2851.916391766" watchObservedRunningTime="2025-09-30 21:01:06.806431145 +0000 UTC m=+2851.922464272" Sep 30 21:01:08 crc kubenswrapper[4919]: I0930 21:01:08.993627 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-distributor-bccccd5f6-zhbb7" Sep 30 21:01:09 crc kubenswrapper[4919]: I0930 21:01:09.235113 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-querier-6b6cdc96db-9xqzp" Sep 30 21:01:09 crc kubenswrapper[4919]: I0930 21:01:09.264696 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd" Sep 30 21:01:09 crc kubenswrapper[4919]: I0930 21:01:09.423730 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:01:09 crc kubenswrapper[4919]: I0930 21:01:09.592721 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:01:10 crc kubenswrapper[4919]: I0930 21:01:10.176098 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-compactor-0" Sep 30 21:01:10 crc kubenswrapper[4919]: I0930 21:01:10.324578 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-index-gateway-0" Sep 30 21:01:10 crc kubenswrapper[4919]: I0930 21:01:10.383122 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="85f27421-a520-4043-b8d5-7729b07a0bed" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 21:01:10 crc kubenswrapper[4919]: I0930 21:01:10.597136 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 21:01:11 crc kubenswrapper[4919]: I0930 21:01:11.373467 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f"] Sep 30 21:01:11 crc kubenswrapper[4919]: I0930 21:01:11.468442 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g6qqk" Sep 30 21:01:11 crc kubenswrapper[4919]: I0930 21:01:11.468503 4919 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g6qqk" Sep 30 21:01:12 crc kubenswrapper[4919]: I0930 21:01:12.537366 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-g6qqk" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="registry-server" probeResult="failure" output=< Sep 30 21:01:12 crc kubenswrapper[4919]: timeout: failed to connect service ":50051" within 1s Sep 30 21:01:12 crc kubenswrapper[4919]: > Sep 30 21:01:13 crc kubenswrapper[4919]: I0930 21:01:13.747484 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-cc5f78dfc-sbrnf"] Sep 30 21:01:13 crc kubenswrapper[4919]: I0930 21:01:13.861108 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f" event={"ID":"4018daf1-fc20-4051-86f6-515140b17020","Type":"ContainerStarted","Data":"66b3fd91dd1b389a5acdeb0274713b7ff162a29d3e44f9d21f660bc3f00c7c59"} Sep 30 21:01:14 crc kubenswrapper[4919]: I0930 21:01:14.899025 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" event={"ID":"ada97c0d-8672-4535-a82d-aeb57a2b192d","Type":"ContainerStarted","Data":"cc133aa27f20f5b4607a8eb290eabac6cac603af1687b27ab21a9c822e5653db"} Sep 30 21:01:15 crc kubenswrapper[4919]: I0930 21:01:15.093838 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx"] Sep 30 21:01:16 crc kubenswrapper[4919]: I0930 21:01:16.865886 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29321101-km2h6"] Sep 30 21:01:16 crc kubenswrapper[4919]: W0930 21:01:16.887414 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod788fa050_bf94_4ec2_b030_7dc6f5ecfef3.slice/crio-cbe375d4aef6c746f31a8a60b879b9475668017a75e989f31e18959c968990ed WatchSource:0}: Error finding container cbe375d4aef6c746f31a8a60b879b9475668017a75e989f31e18959c968990ed: Status 404 returned error can't find the container with id cbe375d4aef6c746f31a8a60b879b9475668017a75e989f31e18959c968990ed Sep 30 21:01:16 crc kubenswrapper[4919]: I0930 21:01:16.938114 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-km2h6" event={"ID":"788fa050-bf94-4ec2-b030-7dc6f5ecfef3","Type":"ContainerStarted","Data":"cbe375d4aef6c746f31a8a60b879b9475668017a75e989f31e18959c968990ed"} Sep 30 21:01:16 crc kubenswrapper[4919]: I0930 21:01:16.943004 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" event={"ID":"4e6b1904-0dbb-41d6-8345-5e71f57442e2","Type":"ContainerStarted","Data":"0211105c2a08d260243e2c20e2b4a40e7a5ee952be314ac72c7ae4606ee435b3"} Sep 30 21:01:17 crc kubenswrapper[4919]: I0930 21:01:16.993983 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz"] Sep 30 21:01:17 crc kubenswrapper[4919]: I0930 21:01:17.189886 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-54bc95c9fb-222zn"] Sep 30 21:01:17 crc kubenswrapper[4919]: W0930 21:01:17.194536 4919 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1701a4b5_dbbc_41a2_96ae_cc483f69e8b9.slice/crio-acbfb3ef9badc8dfdc5a669b515ad2346eaaed2559878fd98420586e7ddbbd36 WatchSource:0}: Error finding container acbfb3ef9badc8dfdc5a669b515ad2346eaaed2559878fd98420586e7ddbbd36: Status 404 returned error can't find the container with id acbfb3ef9badc8dfdc5a669b515ad2346eaaed2559878fd98420586e7ddbbd36 Sep 30 21:01:17 crc kubenswrapper[4919]: E0930 21:01:17.209664 4919 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/jwysogla/cloudkitty-api@sha256:5541d1160f777174a00982fde3c26a9b32ba156f9f140c9628f66d0eef834c86" Sep 30 21:01:17 crc kubenswrapper[4919]: E0930 21:01:17.209828 4919 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.io/jwysogla/cloudkitty-api@sha256:5541d1160f777174a00982fde3c26a9b32ba156f9f140c9628f66d0eef834c86,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mbmdg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-dw2jq_openstack(f781d002-f3f5-43e1-863c-ceb1fd87ec79): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 30 21:01:17 crc kubenswrapper[4919]: E0930 21:01:17.211082 4919 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-dw2jq" podUID="f781d002-f3f5-43e1-863c-ceb1fd87ec79" Sep 30 21:01:17 crc kubenswrapper[4919]: I0930 21:01:17.969865 4919 generic.go:334] "Generic (PLEG): container finished" podID="0dde18df-d1bd-4b36-82af-cd0967cd942b" containerID="3f96202aad8d1794a110a8cc52909d307ffc55d5cfcf2348b38527ff1a21092f" exitCode=2 Sep 30 21:01:17 crc kubenswrapper[4919]: I0930 21:01:17.970279 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerDied","Data":"3f96202aad8d1794a110a8cc52909d307ffc55d5cfcf2348b38527ff1a21092f"} Sep 30 21:01:17 crc kubenswrapper[4919]: I0930 21:01:17.970336 4919 scope.go:117] "RemoveContainer" containerID="839340cc85bca6179da491dd7180e9b96c654e8e84932bcb366504bd12ba4a4d" Sep 30 21:01:17 crc kubenswrapper[4919]: I0930 21:01:17.971325 4919 scope.go:117] "RemoveContainer" containerID="3f96202aad8d1794a110a8cc52909d307ffc55d5cfcf2348b38527ff1a21092f" Sep 30 21:01:17 crc kubenswrapper[4919]: E0930 21:01:17.971935 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:01:18 crc kubenswrapper[4919]: I0930 21:01:18.010139 4919 generic.go:334] "Generic (PLEG): container finished" podID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerID="520b0db5f1472337aa0f061e7447c42d35f79bc64d466b90cb3afd355d7b22d9" exitCode=2 Sep 30 21:01:18 crc kubenswrapper[4919]: I0930 21:01:18.010262 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerDied","Data":"520b0db5f1472337aa0f061e7447c42d35f79bc64d466b90cb3afd355d7b22d9"} Sep 30 21:01:18 crc kubenswrapper[4919]: I0930 21:01:18.010861 4919 scope.go:117] "RemoveContainer" containerID="520b0db5f1472337aa0f061e7447c42d35f79bc64d466b90cb3afd355d7b22d9" Sep 30 21:01:18 crc kubenswrapper[4919]: E0930 21:01:18.011125 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:01:18 crc kubenswrapper[4919]: I0930 21:01:18.021307 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-222zn" event={"ID":"1701a4b5-dbbc-41a2-96ae-cc483f69e8b9","Type":"ContainerStarted","Data":"acbfb3ef9badc8dfdc5a669b515ad2346eaaed2559878fd98420586e7ddbbd36"} Sep 30 21:01:18 crc kubenswrapper[4919]: I0930 21:01:18.024111 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-km2h6" event={"ID":"788fa050-bf94-4ec2-b030-7dc6f5ecfef3","Type":"ContainerStarted","Data":"80826141544b8dd6ba4cb6c6c4c75a4c4240dbc0c1790c48410038dacbbbec6e"} Sep 30 21:01:18 crc kubenswrapper[4919]: I0930 
21:01:18.026944 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" event={"ID":"b530a7bf-2e7a-4396-9b34-38ae127ca22e","Type":"ContainerStarted","Data":"a19fbdb02aaf9aa4dbe26936634b3efaebccef3d54d191fb6c813210da4d583b"} Sep 30 21:01:18 crc kubenswrapper[4919]: E0930 21:01:18.027730 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/jwysogla/cloudkitty-api@sha256:5541d1160f777174a00982fde3c26a9b32ba156f9f140c9628f66d0eef834c86\\\"\"" pod="openstack/cloudkitty-db-sync-dw2jq" podUID="f781d002-f3f5-43e1-863c-ceb1fd87ec79" Sep 30 21:01:18 crc kubenswrapper[4919]: I0930 21:01:18.066464 4919 scope.go:117] "RemoveContainer" containerID="38070fa3144a0ca2c52045199b5800187072f3f1c69e8f06483a7455eb4908dc" Sep 30 21:01:18 crc kubenswrapper[4919]: I0930 21:01:18.127774 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29321101-km2h6" podStartSLOduration=18.127753396 podStartE2EDuration="18.127753396s" podCreationTimestamp="2025-09-30 21:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:18.102534327 +0000 UTC m=+2863.218567444" watchObservedRunningTime="2025-09-30 21:01:18.127753396 +0000 UTC m=+2863.243786523" Sep 30 21:01:19 crc kubenswrapper[4919]: I0930 21:01:19.051935 4919 scope.go:117] "RemoveContainer" containerID="3f96202aad8d1794a110a8cc52909d307ffc55d5cfcf2348b38527ff1a21092f" Sep 30 21:01:19 crc kubenswrapper[4919]: E0930 21:01:19.052540 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:01:19 crc kubenswrapper[4919]: I0930 21:01:19.056037 4919 scope.go:117] "RemoveContainer" containerID="520b0db5f1472337aa0f061e7447c42d35f79bc64d466b90cb3afd355d7b22d9" Sep 30 21:01:19 crc kubenswrapper[4919]: E0930 21:01:19.056313 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:01:19 crc kubenswrapper[4919]: I0930 21:01:19.423359 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:01:19 crc kubenswrapper[4919]: I0930 21:01:19.591954 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:01:20 crc kubenswrapper[4919]: I0930 21:01:20.066473 4919 scope.go:117] "RemoveContainer" containerID="520b0db5f1472337aa0f061e7447c42d35f79bc64d466b90cb3afd355d7b22d9" Sep 30 21:01:20 crc kubenswrapper[4919]: I0930 21:01:20.066556 4919 scope.go:117] "RemoveContainer" containerID="3f96202aad8d1794a110a8cc52909d307ffc55d5cfcf2348b38527ff1a21092f" Sep 30 21:01:20 crc kubenswrapper[4919]: E0930 21:01:20.066842 
4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:01:20 crc kubenswrapper[4919]: E0930 21:01:20.066845 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:01:20 crc kubenswrapper[4919]: I0930 21:01:20.353327 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="85f27421-a520-4043-b8d5-7729b07a0bed" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 21:01:22 crc kubenswrapper[4919]: I0930 21:01:22.528855 4919 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-g6qqk" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="registry-server" probeResult="failure" output=< Sep 30 21:01:22 crc kubenswrapper[4919]: timeout: failed to connect service ":50051" within 1s Sep 30 21:01:22 crc kubenswrapper[4919]: > Sep 30 21:01:23 crc kubenswrapper[4919]: I0930 21:01:23.128311 4919 generic.go:334] "Generic (PLEG): container finished" podID="788fa050-bf94-4ec2-b030-7dc6f5ecfef3" containerID="80826141544b8dd6ba4cb6c6c4c75a4c4240dbc0c1790c48410038dacbbbec6e" exitCode=0 Sep 30 21:01:23 crc kubenswrapper[4919]: I0930 21:01:23.128352 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-km2h6" event={"ID":"788fa050-bf94-4ec2-b030-7dc6f5ecfef3","Type":"ContainerDied","Data":"80826141544b8dd6ba4cb6c6c4c75a4c4240dbc0c1790c48410038dacbbbec6e"} Sep 30 21:01:26 crc kubenswrapper[4919]: I0930 21:01:26.062344 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:01:26 crc kubenswrapper[4919]: I0930 21:01:26.063335 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:01:28 crc kubenswrapper[4919]: I0930 21:01:28.859477 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:28 crc kubenswrapper[4919]: I0930 21:01:28.942126 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-fernet-keys\") pod \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " Sep 30 21:01:28 crc kubenswrapper[4919]: I0930 21:01:28.942197 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-combined-ca-bundle\") pod \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " Sep 30 21:01:28 crc kubenswrapper[4919]: I0930 21:01:28.942330 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-config-data\") pod \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " Sep 30 21:01:28 crc kubenswrapper[4919]: I0930 21:01:28.942475 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jfdb\" (UniqueName: \"kubernetes.io/projected/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-kube-api-access-2jfdb\") pod \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\" (UID: \"788fa050-bf94-4ec2-b030-7dc6f5ecfef3\") " Sep 30 21:01:28 crc kubenswrapper[4919]: I0930 21:01:28.951401 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "788fa050-bf94-4ec2-b030-7dc6f5ecfef3" (UID: "788fa050-bf94-4ec2-b030-7dc6f5ecfef3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:28 crc kubenswrapper[4919]: I0930 21:01:28.951750 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-kube-api-access-2jfdb" (OuterVolumeSpecName: "kube-api-access-2jfdb") pod "788fa050-bf94-4ec2-b030-7dc6f5ecfef3" (UID: "788fa050-bf94-4ec2-b030-7dc6f5ecfef3"). InnerVolumeSpecName "kube-api-access-2jfdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:28 crc kubenswrapper[4919]: I0930 21:01:28.980763 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "788fa050-bf94-4ec2-b030-7dc6f5ecfef3" (UID: "788fa050-bf94-4ec2-b030-7dc6f5ecfef3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4919]: I0930 21:01:29.002682 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-config-data" (OuterVolumeSpecName: "config-data") pod "788fa050-bf94-4ec2-b030-7dc6f5ecfef3" (UID: "788fa050-bf94-4ec2-b030-7dc6f5ecfef3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:29 crc kubenswrapper[4919]: I0930 21:01:29.045104 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jfdb\" (UniqueName: \"kubernetes.io/projected/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-kube-api-access-2jfdb\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4919]: I0930 21:01:29.045158 4919 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4919]: I0930 21:01:29.045167 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4919]: I0930 21:01:29.045176 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/788fa050-bf94-4ec2-b030-7dc6f5ecfef3-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:29 crc kubenswrapper[4919]: I0930 21:01:29.194448 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29321101-km2h6" event={"ID":"788fa050-bf94-4ec2-b030-7dc6f5ecfef3","Type":"ContainerDied","Data":"cbe375d4aef6c746f31a8a60b879b9475668017a75e989f31e18959c968990ed"} Sep 30 21:01:29 crc kubenswrapper[4919]: I0930 21:01:29.194740 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbe375d4aef6c746f31a8a60b879b9475668017a75e989f31e18959c968990ed" Sep 30 21:01:29 crc kubenswrapper[4919]: I0930 21:01:29.194550 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29321101-km2h6" Sep 30 21:01:30 crc kubenswrapper[4919]: I0930 21:01:30.338761 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="85f27421-a520-4043-b8d5-7729b07a0bed" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 30 21:01:30 crc kubenswrapper[4919]: I0930 21:01:30.632379 4919 scope.go:117] "RemoveContainer" containerID="520b0db5f1472337aa0f061e7447c42d35f79bc64d466b90cb3afd355d7b22d9" Sep 30 21:01:30 crc kubenswrapper[4919]: E0930 21:01:30.632709 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.221699 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" event={"ID":"b530a7bf-2e7a-4396-9b34-38ae127ca22e","Type":"ContainerStarted","Data":"69978f30467fcaaf1b00976268dbfa5061d56be0e663890a40e4a55fb8393a59"} Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.227190 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-54bc95c9fb-222zn" event={"ID":"1701a4b5-dbbc-41a2-96ae-cc483f69e8b9","Type":"ContainerStarted","Data":"ea4a3fcf7ba5dfc86fed5e360cf1a8ce3864f5ef4cc905fbe2c5ca56f0b7c86f"} Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.227709 4919 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.229894 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" event={"ID":"4e6b1904-0dbb-41d6-8345-5e71f57442e2","Type":"ContainerStarted","Data":"c7888d5ceab6ae17bcfffee1419f413240786e498f56a97d02d8e4eda5e604d3"} Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.232562 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" event={"ID":"ada97c0d-8672-4535-a82d-aeb57a2b192d","Type":"ContainerStarted","Data":"1ef5cacecf2a359db2acff872a7e09a1ae75058ac52a5c704592d166274e6bbc"} Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.232821 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.236194 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f" event={"ID":"4018daf1-fc20-4051-86f6-515140b17020","Type":"ContainerStarted","Data":"996b5b5b8b68420fd55882db0a721cb70627d0f8412f5a4ccee682fa149f3efe"} Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.252487 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.256244 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz" podStartSLOduration=16.223167437 podStartE2EDuration="29.256206513s" podCreationTimestamp="2025-09-30 21:01:02 +0000 UTC" firstStartedPulling="2025-09-30 21:01:17.027290381 +0000 UTC m=+2862.143323508" lastFinishedPulling="2025-09-30 21:01:30.060329437 +0000 UTC m=+2875.176362584" observedRunningTime="2025-09-30 21:01:31.252484505 +0000 UTC m=+2876.368517632" watchObservedRunningTime="2025-09-30 21:01:31.256206513 +0000 UTC m=+2876.372239640" Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.273945 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-7c8cf85677-qbk5f" podStartSLOduration=13.404464827 podStartE2EDuration="30.273929606s" podCreationTimestamp="2025-09-30 21:01:01 +0000 UTC" firstStartedPulling="2025-09-30 21:01:13.167050329 +0000 UTC m=+2858.283083456" lastFinishedPulling="2025-09-30 21:01:30.036515108 +0000 UTC m=+2875.152548235" observedRunningTime="2025-09-30 21:01:31.273120732 +0000 UTC m=+2876.389153859" watchObservedRunningTime="2025-09-30 21:01:31.273929606 +0000 UTC m=+2876.389962733" Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.308581 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-54bc95c9fb-222zn" podStartSLOduration=16.404767648 podStartE2EDuration="29.308559877s" podCreationTimestamp="2025-09-30 21:01:02 +0000 UTC" firstStartedPulling="2025-09-30 21:01:17.198695447 +0000 UTC m=+2862.314728574" lastFinishedPulling="2025-09-30 21:01:30.102487676 +0000 UTC m=+2875.218520803" observedRunningTime="2025-09-30 21:01:31.297745064 +0000 UTC m=+2876.413778191" watchObservedRunningTime="2025-09-30 21:01:31.308559877 +0000 UTC m=+2876.424593004" Sep 30 21:01:31 crc kubenswrapper[4919]: 
I0930 21:01:31.336731 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx" podStartSLOduration=15.682586912 podStartE2EDuration="29.336713511s" podCreationTimestamp="2025-09-30 21:01:02 +0000 UTC" firstStartedPulling="2025-09-30 21:01:16.383983086 +0000 UTC m=+2861.500016213" lastFinishedPulling="2025-09-30 21:01:30.038109685 +0000 UTC m=+2875.154142812" observedRunningTime="2025-09-30 21:01:31.330133851 +0000 UTC m=+2876.446166978" watchObservedRunningTime="2025-09-30 21:01:31.336713511 +0000 UTC m=+2876.452746628"
Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.362191 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-cc5f78dfc-sbrnf" podStartSLOduration=13.86791688 podStartE2EDuration="29.362174908s" podCreationTimestamp="2025-09-30 21:01:02 +0000 UTC" firstStartedPulling="2025-09-30 21:01:14.543824666 +0000 UTC m=+2859.659857783" lastFinishedPulling="2025-09-30 21:01:30.038082684 +0000 UTC m=+2875.154115811" observedRunningTime="2025-09-30 21:01:31.358889393 +0000 UTC m=+2876.474922520" watchObservedRunningTime="2025-09-30 21:01:31.362174908 +0000 UTC m=+2876.478208035"
Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.513509 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.567014 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:01:31 crc kubenswrapper[4919]: I0930 21:01:31.750825 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g6qqk"]
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.255954 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g6qqk" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="registry-server" containerID="cri-o://95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce" gracePeriod=2
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.798165 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.876864 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-catalog-content\") pod \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") "
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.877010 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr29s\" (UniqueName: \"kubernetes.io/projected/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-kube-api-access-vr29s\") pod \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") "
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.877125 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-utilities\") pod \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\" (UID: \"a6ff1fff-89a9-42b1-98d1-f61f97e78a51\") "
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.878548 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-utilities" (OuterVolumeSpecName: "utilities") pod "a6ff1fff-89a9-42b1-98d1-f61f97e78a51" (UID: "a6ff1fff-89a9-42b1-98d1-f61f97e78a51"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.883130 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-kube-api-access-vr29s" (OuterVolumeSpecName: "kube-api-access-vr29s") pod "a6ff1fff-89a9-42b1-98d1-f61f97e78a51" (UID: "a6ff1fff-89a9-42b1-98d1-f61f97e78a51"). InnerVolumeSpecName "kube-api-access-vr29s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.963726 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6ff1fff-89a9-42b1-98d1-f61f97e78a51" (UID: "a6ff1fff-89a9-42b1-98d1-f61f97e78a51"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.985801 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr29s\" (UniqueName: \"kubernetes.io/projected/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-kube-api-access-vr29s\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.986138 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-utilities\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:33 crc kubenswrapper[4919]: I0930 21:01:33.986151 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6ff1fff-89a9-42b1-98d1-f61f97e78a51-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.284352 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-dw2jq" event={"ID":"f781d002-f3f5-43e1-863c-ceb1fd87ec79","Type":"ContainerStarted","Data":"bc5a3a38e243601d2c4fd466bc4994264150ca77abe99bdc24de352b55d7425f"}
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.297623 4919 generic.go:334] "Generic (PLEG): container finished" podID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerID="95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce" exitCode=0
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.297664 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6qqk" event={"ID":"a6ff1fff-89a9-42b1-98d1-f61f97e78a51","Type":"ContainerDied","Data":"95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce"}
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.297691 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g6qqk" event={"ID":"a6ff1fff-89a9-42b1-98d1-f61f97e78a51","Type":"ContainerDied","Data":"daf9750daa144d84ca2ead2b459453f0598a600aa5647891a918fe1fe83f5296"}
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.297709 4919 scope.go:117] "RemoveContainer" containerID="95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.297829 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g6qqk"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.348906 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-dw2jq" podStartSLOduration=3.863966269 podStartE2EDuration="44.348866435s" podCreationTimestamp="2025-09-30 21:00:50 +0000 UTC" firstStartedPulling="2025-09-30 21:00:52.737208482 +0000 UTC m=+2837.853241609" lastFinishedPulling="2025-09-30 21:01:33.222108648 +0000 UTC m=+2878.338141775" observedRunningTime="2025-09-30 21:01:34.312692439 +0000 UTC m=+2879.428725566" watchObservedRunningTime="2025-09-30 21:01:34.348866435 +0000 UTC m=+2879.464899562"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.350397 4919 scope.go:117] "RemoveContainer" containerID="6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.399885 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g6qqk"]
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.411455 4919 scope.go:117] "RemoveContainer" containerID="aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.413688 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g6qqk"]
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.476867 4919 scope.go:117] "RemoveContainer" containerID="95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce"
Sep 30 21:01:34 crc kubenswrapper[4919]: E0930 21:01:34.481442 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce\": container with ID starting with 95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce not found: ID does not exist" containerID="95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.481487 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce"} err="failed to get container status \"95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce\": rpc error: code = NotFound desc = could not find container \"95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce\": container with ID starting with 95e4f3f2f645090abc8f3c0b35291b7f89268db3c4c3ecb14e034a5a36e59dce not found: ID does not exist"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.481513 4919 scope.go:117] "RemoveContainer" containerID="6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8"
Sep 30 21:01:34 crc kubenswrapper[4919]: E0930 21:01:34.484023 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8\": container with ID starting with 6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8 not found: ID does not exist" containerID="6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.484055 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8"} err="failed to get container status \"6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8\": rpc error: code = NotFound desc = could not find container \"6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8\": container with ID starting with 6372c72ff6e3e68182179ad0fe7757035ed67b218e8bd6c1128ead4632d562a8 not found: ID does not exist"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.484074 4919 scope.go:117] "RemoveContainer" containerID="aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f"
Sep 30 21:01:34 crc kubenswrapper[4919]: E0930 21:01:34.484326 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f\": container with ID starting with aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f not found: ID does not exist" containerID="aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.484346 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f"} err="failed to get container status \"aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f\": rpc error: code = NotFound desc = could not find container \"aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f\": container with ID starting with aee4ba40d1a61efc00d2f7416884fe125f979e822f76800ac92f37d8dc5f9c9f not found: ID does not exist"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.486224 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Sep 30 21:01:34 crc kubenswrapper[4919]: E0930 21:01:34.486677 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="registry-server"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.486693 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="registry-server"
Sep 30 21:01:34 crc kubenswrapper[4919]: E0930 21:01:34.486710 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="extract-content"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.486719 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="extract-content"
Sep 30 21:01:34 crc kubenswrapper[4919]: E0930 21:01:34.486748 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="extract-utilities"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.486754 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="extract-utilities"
Sep 30 21:01:34 crc kubenswrapper[4919]: E0930 21:01:34.486767 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788fa050-bf94-4ec2-b030-7dc6f5ecfef3" containerName="keystone-cron"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.486775 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="788fa050-bf94-4ec2-b030-7dc6f5ecfef3" containerName="keystone-cron"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.486978 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="788fa050-bf94-4ec2-b030-7dc6f5ecfef3" containerName="keystone-cron"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.486992 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" containerName="registry-server"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.488543 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.496017 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.496042 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.496098 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-frmkp"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.496187 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.497921 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.600228 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwwkk\" (UniqueName: \"kubernetes.io/projected/df15c633-42da-4bd4-8d99-042064dcb9cf-kube-api-access-mwwkk\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.600286 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/df15c633-42da-4bd4-8d99-042064dcb9cf-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.600326 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/df15c633-42da-4bd4-8d99-042064dcb9cf-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.600359 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/df15c633-42da-4bd4-8d99-042064dcb9cf-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.600392 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/df15c633-42da-4bd4-8d99-042064dcb9cf-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.600454 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/df15c633-42da-4bd4-8d99-042064dcb9cf-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.632899 4919 scope.go:117] "RemoveContainer" containerID="3f96202aad8d1794a110a8cc52909d307ffc55d5cfcf2348b38527ff1a21092f"
Sep 30 21:01:34 crc kubenswrapper[4919]: E0930 21:01:34.633260 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.703170 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwwkk\" (UniqueName: \"kubernetes.io/projected/df15c633-42da-4bd4-8d99-042064dcb9cf-kube-api-access-mwwkk\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.703299 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/df15c633-42da-4bd4-8d99-042064dcb9cf-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.704299 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/df15c633-42da-4bd4-8d99-042064dcb9cf-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.704443 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/df15c633-42da-4bd4-8d99-042064dcb9cf-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.705022 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/df15c633-42da-4bd4-8d99-042064dcb9cf-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.705482 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/df15c633-42da-4bd4-8d99-042064dcb9cf-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.705658 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/df15c633-42da-4bd4-8d99-042064dcb9cf-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.708005 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/df15c633-42da-4bd4-8d99-042064dcb9cf-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.708649 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/df15c633-42da-4bd4-8d99-042064dcb9cf-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.710094 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/df15c633-42da-4bd4-8d99-042064dcb9cf-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.717569 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/df15c633-42da-4bd4-8d99-042064dcb9cf-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.728353 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwwkk\" (UniqueName: \"kubernetes.io/projected/df15c633-42da-4bd4-8d99-042064dcb9cf-kube-api-access-mwwkk\") pod \"alertmanager-metric-storage-0\" (UID: \"df15c633-42da-4bd4-8d99-042064dcb9cf\") " pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:34 crc kubenswrapper[4919]: I0930 21:01:34.819132 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.378829 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.518400 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.521632 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.526996 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.527707 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-6df5d"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.527903 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.528088 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.533628 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.541432 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.563842 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.629225 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-config\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.629906 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-249bb36c-5009-420f-a4c3-0f5db65cd075\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-249bb36c-5009-420f-a4c3-0f5db65cd075\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.629946 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/af3fb66e-cbac-480c-b048-12f8bf6c2013-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.629977 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/af3fb66e-cbac-480c-b048-12f8bf6c2013-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.630063 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.630088 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4phm\" (UniqueName: \"kubernetes.io/projected/af3fb66e-cbac-480c-b048-12f8bf6c2013-kube-api-access-m4phm\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.630139 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/af3fb66e-cbac-480c-b048-12f8bf6c2013-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.630163 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.669610 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6ff1fff-89a9-42b1-98d1-f61f97e78a51" path="/var/lib/kubelet/pods/a6ff1fff-89a9-42b1-98d1-f61f97e78a51/volumes"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.732133 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-249bb36c-5009-420f-a4c3-0f5db65cd075\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-249bb36c-5009-420f-a4c3-0f5db65cd075\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.732528 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/af3fb66e-cbac-480c-b048-12f8bf6c2013-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.732558 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/af3fb66e-cbac-480c-b048-12f8bf6c2013-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.733134 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.733720 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4phm\" (UniqueName: \"kubernetes.io/projected/af3fb66e-cbac-480c-b048-12f8bf6c2013-kube-api-access-m4phm\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.733806 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/af3fb66e-cbac-480c-b048-12f8bf6c2013-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.733838 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.734063 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-config\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.734752 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.736613 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.740914 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.741005 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.740943 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/af3fb66e-cbac-480c-b048-12f8bf6c2013-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.741013 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.741244 4919 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.741284 4919 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-249bb36c-5009-420f-a4c3-0f5db65cd075\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-249bb36c-5009-420f-a4c3-0f5db65cd075\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6d39462408d96732f69b05f041cd3a83d9633e1aadb98746956c81f2414edadd/globalmount\"" pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.743576 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/af3fb66e-cbac-480c-b048-12f8bf6c2013-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.747776 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.747799 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-config\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.749035 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/af3fb66e-cbac-480c-b048-12f8bf6c2013-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.750584 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/af3fb66e-cbac-480c-b048-12f8bf6c2013-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.754745 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4phm\" (UniqueName: \"kubernetes.io/projected/af3fb66e-cbac-480c-b048-12f8bf6c2013-kube-api-access-m4phm\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.789717 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-249bb36c-5009-420f-a4c3-0f5db65cd075\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-249bb36c-5009-420f-a4c3-0f5db65cd075\") pod \"prometheus-metric-storage-0\" (UID: \"af3fb66e-cbac-480c-b048-12f8bf6c2013\") " pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.868951 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-6df5d" Sep 30 
21:01:35 crc kubenswrapper[4919]: I0930 21:01:35.872783 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Sep 30 21:01:36 crc kubenswrapper[4919]: I0930 21:01:36.332475 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"df15c633-42da-4bd4-8d99-042064dcb9cf","Type":"ContainerStarted","Data":"b6d2a231c2cdcaab9f30bddf98640db6eedb02f1be16db22d2d087704e9e833f"} Sep 30 21:01:36 crc kubenswrapper[4919]: I0930 21:01:36.356089 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Sep 30 21:01:37 crc kubenswrapper[4919]: I0930 21:01:37.358390 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af3fb66e-cbac-480c-b048-12f8bf6c2013","Type":"ContainerStarted","Data":"0e856b4414f69803d6f37d527682c86d6f4f67ed920f8f29435347971ed10285"} Sep 30 21:01:38 crc kubenswrapper[4919]: I0930 21:01:38.368269 4919 generic.go:334] "Generic (PLEG): container finished" podID="f781d002-f3f5-43e1-863c-ceb1fd87ec79" containerID="bc5a3a38e243601d2c4fd466bc4994264150ca77abe99bdc24de352b55d7425f" exitCode=0 Sep 30 21:01:38 crc kubenswrapper[4919]: I0930 21:01:38.368351 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-dw2jq" event={"ID":"f781d002-f3f5-43e1-863c-ceb1fd87ec79","Type":"ContainerDied","Data":"bc5a3a38e243601d2c4fd466bc4994264150ca77abe99bdc24de352b55d7425f"} Sep 30 21:01:39 crc kubenswrapper[4919]: I0930 21:01:39.423716 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:01:39 crc kubenswrapper[4919]: I0930 21:01:39.424672 4919 scope.go:117] "RemoveContainer" containerID="520b0db5f1472337aa0f061e7447c42d35f79bc64d466b90cb3afd355d7b22d9" Sep 30 21:01:39 crc kubenswrapper[4919]: I0930 21:01:39.592343 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:01:39 crc kubenswrapper[4919]: I0930 21:01:39.593511 4919 scope.go:117] "RemoveContainer" containerID="3f96202aad8d1794a110a8cc52909d307ffc55d5cfcf2348b38527ff1a21092f" Sep 30 21:01:40 crc kubenswrapper[4919]: I0930 21:01:40.338560 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-ingester-0" Sep 30 21:01:40 crc kubenswrapper[4919]: I0930 21:01:40.402391 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-dw2jq" event={"ID":"f781d002-f3f5-43e1-863c-ceb1fd87ec79","Type":"ContainerDied","Data":"28bd30131c3b2c881aa2ab00b72d23f0a350aaa38a4a669d5940c410add08de3"} Sep 30 21:01:40 crc kubenswrapper[4919]: I0930 21:01:40.402714 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28bd30131c3b2c881aa2ab00b72d23f0a350aaa38a4a669d5940c410add08de3" Sep 30 21:01:40 crc kubenswrapper[4919]: I0930 21:01:40.973823 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.075976 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-certs\") pod \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.076103 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-combined-ca-bundle\") pod \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.076171 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-scripts\") pod \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.076563 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbmdg\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-kube-api-access-mbmdg\") pod \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.076629 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-config-data\") pod \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\" (UID: \"f781d002-f3f5-43e1-863c-ceb1fd87ec79\") " Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.357579 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-certs" (OuterVolumeSpecName: "certs") pod "f781d002-f3f5-43e1-863c-ceb1fd87ec79" (UID: "f781d002-f3f5-43e1-863c-ceb1fd87ec79"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.366660 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-kube-api-access-mbmdg" (OuterVolumeSpecName: "kube-api-access-mbmdg") pod "f781d002-f3f5-43e1-863c-ceb1fd87ec79" (UID: "f781d002-f3f5-43e1-863c-ceb1fd87ec79"). InnerVolumeSpecName "kube-api-access-mbmdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.383468 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbmdg\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-kube-api-access-mbmdg\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.383495 4919 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f781d002-f3f5-43e1-863c-ceb1fd87ec79-certs\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.434704 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-dw2jq" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.434954 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerStarted","Data":"bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23"} Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.436297 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.441137 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerName="gateway" probeResult="failure" output="Get \"https://10.217.0.251:8081/ready\": dial tcp 10.217.0.251:8081: connect: connection refused" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.484253 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podStartSLOduration=56.84657138 podStartE2EDuration="1m2.484225185s" podCreationTimestamp="2025-09-30 21:00:39 +0000 UTC" firstStartedPulling="2025-09-30 21:00:40.20285915 +0000 UTC m=+2825.318892277" lastFinishedPulling="2025-09-30 21:00:45.840512945 +0000 UTC m=+2830.956546082" observedRunningTime="2025-09-30 21:01:41.453492476 +0000 UTC m=+2886.569525603" watchObservedRunningTime="2025-09-30 21:01:41.484225185 +0000 UTC m=+2886.600258312" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.562540 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-scripts" (OuterVolumeSpecName: "scripts") pod "f781d002-f3f5-43e1-863c-ceb1fd87ec79" (UID: "f781d002-f3f5-43e1-863c-ceb1fd87ec79"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.605325 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.652604 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f781d002-f3f5-43e1-863c-ceb1fd87ec79" (UID: "f781d002-f3f5-43e1-863c-ceb1fd87ec79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.696653 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-config-data" (OuterVolumeSpecName: "config-data") pod "f781d002-f3f5-43e1-863c-ceb1fd87ec79" (UID: "f781d002-f3f5-43e1-863c-ceb1fd87ec79"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.707796 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:41 crc kubenswrapper[4919]: I0930 21:01:41.707826 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f781d002-f3f5-43e1-863c-ceb1fd87ec79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.215726 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-gnq6c"] Sep 30 21:01:42 crc kubenswrapper[4919]: E0930 21:01:42.216262 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f781d002-f3f5-43e1-863c-ceb1fd87ec79" containerName="cloudkitty-db-sync" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.216279 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f781d002-f3f5-43e1-863c-ceb1fd87ec79" containerName="cloudkitty-db-sync" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.216559 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="f781d002-f3f5-43e1-863c-ceb1fd87ec79" containerName="cloudkitty-db-sync" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.217518 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.224264 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.224673 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.224878 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.226108 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-m6hm4" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.228793 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-gnq6c"] Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.320673 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-config-data\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.320931 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-scripts\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.320986 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-combined-ca-bundle\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " 
pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.321077 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-certs\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.321160 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmx76\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-kube-api-access-hmx76\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.423328 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmx76\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-kube-api-access-hmx76\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.423418 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-config-data\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.423457 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-scripts\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.423585 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-combined-ca-bundle\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.423713 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-certs\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.430320 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-certs\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.430602 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-combined-ca-bundle\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc 
kubenswrapper[4919]: I0930 21:01:42.432672 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-scripts\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.433096 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-config-data\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.442702 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmx76\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-kube-api-access-hmx76\") pod \"cloudkitty-storageinit-gnq6c\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") " pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.452639 4919 generic.go:334] "Generic (PLEG): container finished" podID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23" exitCode=2 Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.452870 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerDied","Data":"bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23"} Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.453022 4919 scope.go:117] "RemoveContainer" containerID="520b0db5f1472337aa0f061e7447c42d35f79bc64d466b90cb3afd355d7b22d9" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.454317 4919 scope.go:117] "RemoveContainer" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23" Sep 30 21:01:42 crc kubenswrapper[4919]: E0930 21:01:42.454927 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.464455 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af3fb66e-cbac-480c-b048-12f8bf6c2013","Type":"ContainerStarted","Data":"0133d5e8b92e5f85b03545cf3c01aeeaba076829927162162b6b592f133ec5bb"} Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.468344 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"df15c633-42da-4bd4-8d99-042064dcb9cf","Type":"ContainerStarted","Data":"dd3f4e96560917fdb8f09db2f0e721d60e5d416f5629df8c9a43db78dbfddf8e"} Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.471870 4919 generic.go:334] "Generic (PLEG): container finished" podID="0dde18df-d1bd-4b36-82af-cd0967cd942b" containerID="96d1912bebbb471f87d9d0fb4434d899c6413ca7edf420dd2c86c578932751ee" exitCode=2 Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.471931 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerDied","Data":"96d1912bebbb471f87d9d0fb4434d899c6413ca7edf420dd2c86c578932751ee"} Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.472967 4919 scope.go:117] "RemoveContainer" containerID="96d1912bebbb471f87d9d0fb4434d899c6413ca7edf420dd2c86c578932751ee" Sep 30 21:01:42 crc kubenswrapper[4919]: E0930 21:01:42.473550 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.570669 4919 scope.go:117] "RemoveContainer" containerID="3f96202aad8d1794a110a8cc52909d307ffc55d5cfcf2348b38527ff1a21092f" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.616571 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:42 crc kubenswrapper[4919]: I0930 21:01:42.865071 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-54bc95c9fb-222zn" Sep 30 21:01:43 crc kubenswrapper[4919]: I0930 21:01:43.151182 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-gnq6c"] Sep 30 21:01:43 crc kubenswrapper[4919]: W0930 21:01:43.154924 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4df6b31_fbe9_43f8_ac28_3d5933009f2b.slice/crio-b91819f826475d6986aff03d7d8fc323f131d0a7fe71ced88b21f903814692bf WatchSource:0}: Error finding container b91819f826475d6986aff03d7d8fc323f131d0a7fe71ced88b21f903814692bf: Status 404 returned error can't find the container with id b91819f826475d6986aff03d7d8fc323f131d0a7fe71ced88b21f903814692bf Sep 30 21:01:43 crc kubenswrapper[4919]: I0930 21:01:43.491706 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-gnq6c" event={"ID":"b4df6b31-fbe9-43f8-ac28-3d5933009f2b","Type":"ContainerStarted","Data":"b3fca1e850adc43d46e0b45e90ab8cf4e160c1f769ff93aa7183e5f8608517c2"} Sep 30 21:01:43 crc kubenswrapper[4919]: I0930 21:01:43.491991 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-gnq6c" event={"ID":"b4df6b31-fbe9-43f8-ac28-3d5933009f2b","Type":"ContainerStarted","Data":"b91819f826475d6986aff03d7d8fc323f131d0a7fe71ced88b21f903814692bf"} Sep 30 21:01:43 crc kubenswrapper[4919]: I0930 21:01:43.496681 4919 scope.go:117] "RemoveContainer" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23" Sep 30 21:01:43 crc kubenswrapper[4919]: E0930 21:01:43.496998 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:01:43 crc kubenswrapper[4919]: I0930 21:01:43.515631 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-gnq6c" 
podStartSLOduration=1.515600804 podStartE2EDuration="1.515600804s" podCreationTimestamp="2025-09-30 21:01:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:43.5095812 +0000 UTC m=+2888.625614327" watchObservedRunningTime="2025-09-30 21:01:43.515600804 +0000 UTC m=+2888.631633931" Sep 30 21:01:44 crc kubenswrapper[4919]: I0930 21:01:44.592705 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:01:44 crc kubenswrapper[4919]: I0930 21:01:44.593723 4919 scope.go:117] "RemoveContainer" containerID="96d1912bebbb471f87d9d0fb4434d899c6413ca7edf420dd2c86c578932751ee" Sep 30 21:01:44 crc kubenswrapper[4919]: E0930 21:01:44.593968 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:01:47 crc kubenswrapper[4919]: I0930 21:01:47.526685 4919 generic.go:334] "Generic (PLEG): container finished" podID="b4df6b31-fbe9-43f8-ac28-3d5933009f2b" containerID="b3fca1e850adc43d46e0b45e90ab8cf4e160c1f769ff93aa7183e5f8608517c2" exitCode=0 Sep 30 21:01:47 crc kubenswrapper[4919]: I0930 21:01:47.526764 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-gnq6c" event={"ID":"b4df6b31-fbe9-43f8-ac28-3d5933009f2b","Type":"ContainerDied","Data":"b3fca1e850adc43d46e0b45e90ab8cf4e160c1f769ff93aa7183e5f8608517c2"} Sep 30 21:01:48 crc kubenswrapper[4919]: I0930 21:01:48.537414 4919 generic.go:334] "Generic (PLEG): container finished" podID="df15c633-42da-4bd4-8d99-042064dcb9cf" containerID="dd3f4e96560917fdb8f09db2f0e721d60e5d416f5629df8c9a43db78dbfddf8e" exitCode=0 Sep 30 21:01:48 crc kubenswrapper[4919]: I0930 21:01:48.537514 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"df15c633-42da-4bd4-8d99-042064dcb9cf","Type":"ContainerDied","Data":"dd3f4e96560917fdb8f09db2f0e721d60e5d416f5629df8c9a43db78dbfddf8e"} Sep 30 21:01:48 crc kubenswrapper[4919]: I0930 21:01:48.539455 4919 generic.go:334] "Generic (PLEG): container finished" podID="af3fb66e-cbac-480c-b048-12f8bf6c2013" containerID="0133d5e8b92e5f85b03545cf3c01aeeaba076829927162162b6b592f133ec5bb" exitCode=0 Sep 30 21:01:48 crc kubenswrapper[4919]: I0930 21:01:48.539553 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af3fb66e-cbac-480c-b048-12f8bf6c2013","Type":"ContainerDied","Data":"0133d5e8b92e5f85b03545cf3c01aeeaba076829927162162b6b592f133ec5bb"} Sep 30 21:01:48 crc kubenswrapper[4919]: I0930 21:01:48.972776 4919 util.go:48] "No ready sandbox for pod can be found. 
Sep 30 21:01:48 crc kubenswrapper[4919]: I0930 21:01:48.972776 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-gnq6c"
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.134826 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-combined-ca-bundle\") pod \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") "
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.135440 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-certs\") pod \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") "
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.135499 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmx76\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-kube-api-access-hmx76\") pod \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") "
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.135568 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-scripts\") pod \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") "
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.135712 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-config-data\") pod \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\" (UID: \"b4df6b31-fbe9-43f8-ac28-3d5933009f2b\") "
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.142153 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-scripts" (OuterVolumeSpecName: "scripts") pod "b4df6b31-fbe9-43f8-ac28-3d5933009f2b" (UID: "b4df6b31-fbe9-43f8-ac28-3d5933009f2b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.142160 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-certs" (OuterVolumeSpecName: "certs") pod "b4df6b31-fbe9-43f8-ac28-3d5933009f2b" (UID: "b4df6b31-fbe9-43f8-ac28-3d5933009f2b"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.143644 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-kube-api-access-hmx76" (OuterVolumeSpecName: "kube-api-access-hmx76") pod "b4df6b31-fbe9-43f8-ac28-3d5933009f2b" (UID: "b4df6b31-fbe9-43f8-ac28-3d5933009f2b"). InnerVolumeSpecName "kube-api-access-hmx76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.172197 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-config-data" (OuterVolumeSpecName: "config-data") pod "b4df6b31-fbe9-43f8-ac28-3d5933009f2b" (UID: "b4df6b31-fbe9-43f8-ac28-3d5933009f2b"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.180610 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4df6b31-fbe9-43f8-ac28-3d5933009f2b" (UID: "b4df6b31-fbe9-43f8-ac28-3d5933009f2b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.238663 4919 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-certs\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.238888 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmx76\" (UniqueName: \"kubernetes.io/projected/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-kube-api-access-hmx76\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.239724 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.239773 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.239793 4919 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4df6b31-fbe9-43f8-ac28-3d5933009f2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.552655 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-gnq6c" event={"ID":"b4df6b31-fbe9-43f8-ac28-3d5933009f2b","Type":"ContainerDied","Data":"b91819f826475d6986aff03d7d8fc323f131d0a7fe71ced88b21f903814692bf"} Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.552694 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b91819f826475d6986aff03d7d8fc323f131d0a7fe71ced88b21f903814692bf" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.552761 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-gnq6c" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.721234 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Sep 30 21:01:49 crc kubenswrapper[4919]: E0930 21:01:49.722856 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4df6b31-fbe9-43f8-ac28-3d5933009f2b" containerName="cloudkitty-storageinit" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.722974 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4df6b31-fbe9-43f8-ac28-3d5933009f2b" containerName="cloudkitty-storageinit" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.723384 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4df6b31-fbe9-43f8-ac28-3d5933009f2b" containerName="cloudkitty-storageinit" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.726546 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.732581 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.736348 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.736555 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.736830 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.736982 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-m6hm4" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.737121 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.853158 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.854959 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.855783 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-config-data\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.855866 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9jtx\" (UniqueName: \"kubernetes.io/projected/983be098-678e-4ecb-a684-7874ae171f14-kube-api-access-b9jtx\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.856414 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-scripts\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.856492 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.856523 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.856569 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/projected/983be098-678e-4ecb-a684-7874ae171f14-certs\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.858434 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.876885 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958599 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/983be098-678e-4ecb-a684-7874ae171f14-certs\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958659 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72286461-f967-4cb3-8ed4-6387ad420acb-logs\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958718 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9rj7\" (UniqueName: \"kubernetes.io/projected/72286461-f967-4cb3-8ed4-6387ad420acb-kube-api-access-n9rj7\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958761 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-config-data\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958802 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-config-data\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958828 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/72286461-f967-4cb3-8ed4-6387ad420acb-certs\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958848 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958870 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9jtx\" (UniqueName: \"kubernetes.io/projected/983be098-678e-4ecb-a684-7874ae171f14-kube-api-access-b9jtx\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958910 4919 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958940 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-scripts\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.958961 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-scripts\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.959001 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.959018 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.970292 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.974910 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/983be098-678e-4ecb-a684-7874ae171f14-certs\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.980786 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-scripts\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.981856 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.986314 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/983be098-678e-4ecb-a684-7874ae171f14-config-data\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 
21:01:49 crc kubenswrapper[4919]: I0930 21:01:49.986320 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9jtx\" (UniqueName: \"kubernetes.io/projected/983be098-678e-4ecb-a684-7874ae171f14-kube-api-access-b9jtx\") pod \"cloudkitty-proc-0\" (UID: \"983be098-678e-4ecb-a684-7874ae171f14\") " pod="openstack/cloudkitty-proc-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.062298 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.062356 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-scripts\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.062416 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72286461-f967-4cb3-8ed4-6387ad420acb-logs\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.062447 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9rj7\" (UniqueName: \"kubernetes.io/projected/72286461-f967-4cb3-8ed4-6387ad420acb-kube-api-access-n9rj7\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.062485 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-config-data\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.062527 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/72286461-f967-4cb3-8ed4-6387ad420acb-certs\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.062545 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.063504 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72286461-f967-4cb3-8ed4-6387ad420acb-logs\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0" Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.064990 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0"
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.068424 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/72286461-f967-4cb3-8ed4-6387ad420acb-certs\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0"
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.068800 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0"
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.069380 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-config-data\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0"
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.093708 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-scripts\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0"
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.097107 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/72286461-f967-4cb3-8ed4-6387ad420acb-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0"
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.101664 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9rj7\" (UniqueName: \"kubernetes.io/projected/72286461-f967-4cb3-8ed4-6387ad420acb-kube-api-access-n9rj7\") pod \"cloudkitty-api-0\" (UID: \"72286461-f967-4cb3-8ed4-6387ad420acb\") " pod="openstack/cloudkitty-api-0"
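The VerifyControllerAttachedVolume / MountVolume.SetUp entries in this window, and the UnmountVolume.TearDown / "Volume detached" entries earlier, are the kubelet volume manager reconciling a desired world (volumes the scheduled pods need) against an actual world (what is currently mounted). A minimal sketch of that reconcile pattern, with illustrative types rather than the volumemanager's real API:

package main

import "fmt"

// reconcile compares the desired world (volumes pods need) with the
// actual world (volumes currently mounted) and issues mount/unmount
// operations, mirroring the operationExecutor entries in the log.
// This is a schematic of the pattern, not kubelet's implementation.
func reconcile(desired, actual map[string]bool) {
	for v := range desired {
		if !actual[v] {
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
			actual[v] = true // MountVolume.SetUp succeeded
		}
	}
	for v := range actual {
		if !desired[v] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", v)
			delete(actual, v) // UnmountVolume.TearDown succeeded; volume detached
		}
	}
}

func main() {
	desired := map[string]bool{"config-data": true, "scripts": true, "certs": true}
	actual := map[string]bool{}
	reconcile(desired, actual) // pod added: everything mounts
	delete(desired, "certs")   // volume leaves the desired world
	reconcile(desired, actual) // certs unmounts and is reported detached
}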
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.194938 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.787417 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Sep 30 21:01:50 crc kubenswrapper[4919]: W0930 21:01:50.790678 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod983be098_678e_4ecb_a684_7874ae171f14.slice/crio-ae72569a5c9934d2ed7a610f00fd2009046f18a60e6a156d1788c7848f92b698 WatchSource:0}: Error finding container ae72569a5c9934d2ed7a610f00fd2009046f18a60e6a156d1788c7848f92b698: Status 404 returned error can't find the container with id ae72569a5c9934d2ed7a610f00fd2009046f18a60e6a156d1788c7848f92b698
Sep 30 21:01:50 crc kubenswrapper[4919]: I0930 21:01:50.952206 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Sep 30 21:01:50 crc kubenswrapper[4919]: W0930 21:01:50.958393 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72286461_f967_4cb3_8ed4_6387ad420acb.slice/crio-99ad613a096821e89249c43adf7da04c534416e3d4235d3deddc8a64c814237b WatchSource:0}: Error finding container 99ad613a096821e89249c43adf7da04c534416e3d4235d3deddc8a64c814237b: Status 404 returned error can't find the container with id 99ad613a096821e89249c43adf7da04c534416e3d4235d3deddc8a64c814237b
Sep 30 21:01:51 crc kubenswrapper[4919]: I0930 21:01:51.586723 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"72286461-f967-4cb3-8ed4-6387ad420acb","Type":"ContainerStarted","Data":"16bea8d253ddf891a7eaaf1a37bf19b5da5c528332c0cde99b84924024f375d7"}
Sep 30 21:01:51 crc kubenswrapper[4919]: I0930 21:01:51.587075 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0"
Sep 30 21:01:51 crc kubenswrapper[4919]: I0930 21:01:51.587091 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"72286461-f967-4cb3-8ed4-6387ad420acb","Type":"ContainerStarted","Data":"c67065817a74c61a78dbdad88a2d8b4bd1d1c702aa2152237eb134112e048cd5"}
Sep 30 21:01:51 crc kubenswrapper[4919]: I0930 21:01:51.587104 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"72286461-f967-4cb3-8ed4-6387ad420acb","Type":"ContainerStarted","Data":"99ad613a096821e89249c43adf7da04c534416e3d4235d3deddc8a64c814237b"}
Sep 30 21:01:51 crc kubenswrapper[4919]: I0930 21:01:51.589607 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"ae72569a5c9934d2ed7a610f00fd2009046f18a60e6a156d1788c7848f92b698"}
Sep 30 21:01:51 crc kubenswrapper[4919]: I0930 21:01:51.605628 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.6056041629999998 podStartE2EDuration="2.605604163s" podCreationTimestamp="2025-09-30 21:01:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-30 21:01:51.599684622 +0000 UTC m=+2896.715717749" watchObservedRunningTime="2025-09-30 21:01:51.605604163 +0000 UTC m=+2896.721637290"
Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.188011 4919 kubelet.go:2421] "SyncLoop ADD" source="api"
pods=["openshift-must-gather-25ld5/must-gather-cg45r"] Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.191183 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.192714 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-25ld5"/"openshift-service-ca.crt" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.200277 4919 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-25ld5"/"kube-root-ca.crt" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.200284 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-25ld5"/"default-dockercfg-88mjv" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.231657 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-25ld5/must-gather-cg45r"] Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.309597 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrv4g\" (UniqueName: \"kubernetes.io/projected/637f326d-ce6d-43e0-a286-3619fa7bda84-kube-api-access-mrv4g\") pod \"must-gather-cg45r\" (UID: \"637f326d-ce6d-43e0-a286-3619fa7bda84\") " pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.309928 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/637f326d-ce6d-43e0-a286-3619fa7bda84-must-gather-output\") pod \"must-gather-cg45r\" (UID: \"637f326d-ce6d-43e0-a286-3619fa7bda84\") " pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.412163 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrv4g\" (UniqueName: \"kubernetes.io/projected/637f326d-ce6d-43e0-a286-3619fa7bda84-kube-api-access-mrv4g\") pod \"must-gather-cg45r\" (UID: \"637f326d-ce6d-43e0-a286-3619fa7bda84\") " pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.412325 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/637f326d-ce6d-43e0-a286-3619fa7bda84-must-gather-output\") pod \"must-gather-cg45r\" (UID: \"637f326d-ce6d-43e0-a286-3619fa7bda84\") " pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.413482 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/637f326d-ce6d-43e0-a286-3619fa7bda84-must-gather-output\") pod \"must-gather-cg45r\" (UID: \"637f326d-ce6d-43e0-a286-3619fa7bda84\") " pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.471131 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrv4g\" (UniqueName: \"kubernetes.io/projected/637f326d-ce6d-43e0-a286-3619fa7bda84-kube-api-access-mrv4g\") pod \"must-gather-cg45r\" (UID: \"637f326d-ce6d-43e0-a286-3619fa7bda84\") " pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:01:55 crc kubenswrapper[4919]: I0930 21:01:55.532801 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25ld5/must-gather-cg45r"
Sep 30 21:01:56 crc kubenswrapper[4919]: I0930 21:01:56.061854 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 30 21:01:56 crc kubenswrapper[4919]: I0930 21:01:56.061909 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 30 21:01:57 crc kubenswrapper[4919]: I0930 21:01:57.632150 4919 scope.go:117] "RemoveContainer" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23"
Sep 30 21:01:57 crc kubenswrapper[4919]: E0930 21:01:57.633259 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3"
Sep 30 21:01:58 crc kubenswrapper[4919]: I0930 21:01:58.480013 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-25ld5/must-gather-cg45r"]
Sep 30 21:01:58 crc kubenswrapper[4919]: I0930 21:01:58.694282 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"df15c633-42da-4bd4-8d99-042064dcb9cf","Type":"ContainerStarted","Data":"deb6da195fcfe695d97ea01221f7cbdc9307cded58eea1dd9543f20d6c71dcc2"}
Sep 30 21:01:58 crc kubenswrapper[4919]: I0930 21:01:58.695983 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"c5df836c6de7c277fef3fb5511ad9c07fa0430efc40eeb3950992e973ff1533d"}
Sep 30 21:01:58 crc kubenswrapper[4919]: I0930 21:01:58.697409 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/must-gather-cg45r" event={"ID":"637f326d-ce6d-43e0-a286-3619fa7bda84","Type":"ContainerStarted","Data":"3904fc7b149c24f9e7874718a8fca02b920c2146c43265b9379001a94a6e83ab"}
Sep 30 21:01:58 crc kubenswrapper[4919]: I0930 21:01:58.699384 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af3fb66e-cbac-480c-b048-12f8bf6c2013","Type":"ContainerStarted","Data":"83e3c469dd12eba036f81a67d6698006ca01ed6a477b01fb60835dfd937ad9b6"}
Sep 30 21:01:58 crc kubenswrapper[4919]: I0930 21:01:58.730997 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.498792834 podStartE2EDuration="9.730975214s" podCreationTimestamp="2025-09-30 21:01:49 +0000 UTC" firstStartedPulling="2025-09-30 21:01:50.793149106 +0000 UTC m=+2895.909182233" lastFinishedPulling="2025-09-30 21:01:58.025331486 +0000 UTC m=+2903.141364613" observedRunningTime="2025-09-30 21:01:58.720673126 +0000 UTC m=+2903.836706253" watchObservedRunningTime="2025-09-30 21:01:58.730975214 +0000 UTC m=+2903.847008351"
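In the pod_startup_latency_tracker entry above, podStartE2EDuration equals watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that E2E time minus the image-pull window (lastFinishedPulling - firstStartedPulling): 9.730975214s - 7.232182380s = 2.498792834s. A small Go reconstruction of that arithmetic using the values from the cloudkitty-proc-0 record (field names are mine, not the tracker's):

package main

import (
	"fmt"
	"time"
)

// startupRecord holds the timestamps reported by the tracker entry.
type startupRecord struct {
	created             time.Time
	firstStartedPulling time.Time
	lastFinishedPulling time.Time
	observedRunning     time.Time // watchObservedRunningTime in the log
}

// e2e is the end-to-end startup duration (podStartE2EDuration).
func (r startupRecord) e2e() time.Duration { return r.observedRunning.Sub(r.created) }

// slo excludes the image-pull window (podStartSLOduration).
func (r startupRecord) slo() time.Duration {
	pull := r.lastFinishedPulling.Sub(r.firstStartedPulling)
	return r.e2e() - pull
}

func mustParse(s string) time.Time {
	t, err := time.Parse(time.RFC3339Nano, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Values copied from the cloudkitty-proc-0 entry above.
	r := startupRecord{
		created:             mustParse("2025-09-30T21:01:49Z"),
		firstStartedPulling: mustParse("2025-09-30T21:01:50.793149106Z"),
		lastFinishedPulling: mustParse("2025-09-30T21:01:58.025331486Z"),
		observedRunning:     mustParse("2025-09-30T21:01:58.730975214Z"),
	}
	fmt.Println("e2e:", r.e2e()) // 9.730975214s, matching podStartE2EDuration
	fmt.Println("slo:", r.slo()) // 2.498792834s, matching podStartSLOduration
}

The zero-valued pull timestamps ("0001-01-01 00:00:00 +0000 UTC") in other entries collapse the pull window to nothing, which is why podStartSLOduration and podStartE2EDuration coincide for pods whose images were already cached.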
Sep 30 21:01:59 crc kubenswrapper[4919]: I0930 21:01:59.633665 4919 scope.go:117] "RemoveContainer" containerID="96d1912bebbb471f87d9d0fb4434d899c6413ca7edf420dd2c86c578932751ee"
Sep 30 21:01:59 crc kubenswrapper[4919]: E0930 21:01:59.634341 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b"
Sep 30 21:02:02 crc kubenswrapper[4919]: I0930 21:02:02.370703 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=<
Sep 30 21:02:02 crc kubenswrapper[4919]: Process cloudkitty-proc not found
Sep 30 21:02:02 crc kubenswrapper[4919]: >
Sep 30 21:02:02 crc kubenswrapper[4919]: I0930 21:02:02.780363 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"df15c633-42da-4bd4-8d99-042064dcb9cf","Type":"ContainerStarted","Data":"866e7837952b9af06111fd96db9e91d707b0ad53025db2e9c9546229bfe49090"}
Sep 30 21:02:02 crc kubenswrapper[4919]: I0930 21:02:02.781881 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:02:02 crc kubenswrapper[4919]: I0930 21:02:02.789520 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0"
Sep 30 21:02:02 crc kubenswrapper[4919]: I0930 21:02:02.815250 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af3fb66e-cbac-480c-b048-12f8bf6c2013","Type":"ContainerStarted","Data":"9ca482ef538aeb10d45a66f17de6582d5b4f707169c1650630730e92b1a29437"}
Sep 30 21:02:02 crc kubenswrapper[4919]: I0930 21:02:02.819798 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=6.194184446 podStartE2EDuration="28.819772216s" podCreationTimestamp="2025-09-30 21:01:34 +0000 UTC" firstStartedPulling="2025-09-30 21:01:35.389349716 +0000 UTC m=+2880.505382833" lastFinishedPulling="2025-09-30 21:01:58.014937476 +0000 UTC m=+2903.130970603" observedRunningTime="2025-09-30 21:02:02.815337247 +0000 UTC m=+2907.931370374" watchObservedRunningTime="2025-09-30 21:02:02.819772216 +0000 UTC m=+2907.935805333"
Sep 30 21:02:03 crc kubenswrapper[4919]: I0930 21:02:03.870793 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 30 21:02:03 crc kubenswrapper[4919]: I0930 21:02:03.871277 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="ceilometer-central-agent" containerID="cri-o://daeecf66698be80fe3c7574c410c95c80277fe3e8c672e767f6cfd54c4122920" gracePeriod=30
Sep 30 21:02:03 crc kubenswrapper[4919]: I0930 21:02:03.871404 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="proxy-httpd" containerID="cri-o://eec54927e84a35714bf5b672947e80c2c5b4256f5745ec9e62afd0edb8252af7" gracePeriod=30
Sep 30 21:02:03 crc kubenswrapper[4919]: I0930 21:02:03.871463 4919 kuberuntime_container.go:808] "Killing container with a
grace period" pod="openstack/ceilometer-0" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="sg-core" containerID="cri-o://6e4704d4dc34d5bad4423ee87075b553bebf0a480670ced4b26e894d4cfc2781" gracePeriod=30 Sep 30 21:02:03 crc kubenswrapper[4919]: I0930 21:02:03.871499 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="ceilometer-notification-agent" containerID="cri-o://09af640df83d0ef1807067dc43c011a841ea3c7acf0e768f58ab60c4c08fc840" gracePeriod=30 Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.836794 4919 generic.go:334] "Generic (PLEG): container finished" podID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerID="eec54927e84a35714bf5b672947e80c2c5b4256f5745ec9e62afd0edb8252af7" exitCode=0 Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837321 4919 generic.go:334] "Generic (PLEG): container finished" podID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerID="6e4704d4dc34d5bad4423ee87075b553bebf0a480670ced4b26e894d4cfc2781" exitCode=2 Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837330 4919 generic.go:334] "Generic (PLEG): container finished" podID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerID="09af640df83d0ef1807067dc43c011a841ea3c7acf0e768f58ab60c4c08fc840" exitCode=0 Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837336 4919 generic.go:334] "Generic (PLEG): container finished" podID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerID="daeecf66698be80fe3c7574c410c95c80277fe3e8c672e767f6cfd54c4122920" exitCode=0 Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837385 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerDied","Data":"eec54927e84a35714bf5b672947e80c2c5b4256f5745ec9e62afd0edb8252af7"} Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837442 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerDied","Data":"6e4704d4dc34d5bad4423ee87075b553bebf0a480670ced4b26e894d4cfc2781"} Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837452 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerDied","Data":"09af640df83d0ef1807067dc43c011a841ea3c7acf0e768f58ab60c4c08fc840"} Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837463 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerDied","Data":"daeecf66698be80fe3c7574c410c95c80277fe3e8c672e767f6cfd54c4122920"} Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837472 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367","Type":"ContainerDied","Data":"3922a09ca5ae95614de42d3f06df8d0024ff39b73527313604e41d1e9dc0638a"} Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.837482 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3922a09ca5ae95614de42d3f06df8d0024ff39b73527313604e41d1e9dc0638a" Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.913651 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.997786 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-scripts\") pod \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.997885 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-run-httpd\") pod \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.998042 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vww6g\" (UniqueName: \"kubernetes.io/projected/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-kube-api-access-vww6g\") pod \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.998114 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-sg-core-conf-yaml\") pod \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.998138 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-log-httpd\") pod \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.998578 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-config-data\") pod \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\" (UID: \"7bfa6a29-fbaa-43d1-9d10-dcd440f5a367\") " Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.998728 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" (UID: "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:02:04 crc kubenswrapper[4919]: I0930 21:02:04.999107 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" (UID: "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:04.999480 4919 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:04.999499 4919 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.004891 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-kube-api-access-vww6g" (OuterVolumeSpecName: "kube-api-access-vww6g") pod "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" (UID: "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367"). InnerVolumeSpecName "kube-api-access-vww6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.008513 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-scripts" (OuterVolumeSpecName: "scripts") pod "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" (UID: "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.039736 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" (UID: "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.101462 4919 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.101833 4919 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-scripts\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.101844 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vww6g\" (UniqueName: \"kubernetes.io/projected/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-kube-api-access-vww6g\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.185421 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-config-data" (OuterVolumeSpecName: "config-data") pod "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" (UID: "7bfa6a29-fbaa-43d1-9d10-dcd440f5a367"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.203517 4919 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367-config-data\") on node \"crc\" DevicePath \"\"" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.382671 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:05 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:05 crc kubenswrapper[4919]: > Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.851443 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.852889 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/must-gather-cg45r" event={"ID":"637f326d-ce6d-43e0-a286-3619fa7bda84","Type":"ContainerStarted","Data":"7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192"} Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.852921 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/must-gather-cg45r" event={"ID":"637f326d-ce6d-43e0-a286-3619fa7bda84","Type":"ContainerStarted","Data":"2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e"} Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.880584 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-25ld5/must-gather-cg45r" podStartSLOduration=4.917930447 podStartE2EDuration="10.880568598s" podCreationTimestamp="2025-09-30 21:01:55 +0000 UTC" firstStartedPulling="2025-09-30 21:01:58.448847285 +0000 UTC m=+2903.564880422" lastFinishedPulling="2025-09-30 21:02:04.411485446 +0000 UTC m=+2909.527518573" observedRunningTime="2025-09-30 21:02:05.866268735 +0000 UTC m=+2910.982301872" watchObservedRunningTime="2025-09-30 21:02:05.880568598 +0000 UTC m=+2910.996601725" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.921329 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.940303 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.951202 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:02:05 crc kubenswrapper[4919]: E0930 21:02:05.952578 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="ceilometer-central-agent" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.952604 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="ceilometer-central-agent" Sep 30 21:02:05 crc kubenswrapper[4919]: E0930 21:02:05.952640 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="sg-core" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.952649 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="sg-core" Sep 30 21:02:05 crc kubenswrapper[4919]: E0930 21:02:05.953564 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" 
containerName="proxy-httpd" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.953582 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="proxy-httpd" Sep 30 21:02:05 crc kubenswrapper[4919]: E0930 21:02:05.953599 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="ceilometer-notification-agent" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.953605 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="ceilometer-notification-agent" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.953800 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="sg-core" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.953825 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="ceilometer-central-agent" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.953835 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="ceilometer-notification-agent" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.953849 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" containerName="proxy-httpd" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.956125 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.961831 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.962046 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.962160 4919 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-86lzz" Sep 30 21:02:05 crc kubenswrapper[4919]: I0930 21:02:05.964539 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.027841 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-config-data\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.027912 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p256\" (UniqueName: \"kubernetes.io/projected/efecc023-59a1-472f-83e9-23f55d53f354-kube-api-access-6p256\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.027978 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efecc023-59a1-472f-83e9-23f55d53f354-run-httpd\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.028037 4919 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-scripts\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.028151 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.028279 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efecc023-59a1-472f-83e9-23f55d53f354-log-httpd\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.130585 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.131034 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efecc023-59a1-472f-83e9-23f55d53f354-log-httpd\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.131131 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-config-data\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.131150 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p256\" (UniqueName: \"kubernetes.io/projected/efecc023-59a1-472f-83e9-23f55d53f354-kube-api-access-6p256\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.131195 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efecc023-59a1-472f-83e9-23f55d53f354-run-httpd\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.131264 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-scripts\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.131936 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efecc023-59a1-472f-83e9-23f55d53f354-run-httpd\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.132143 4919 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/efecc023-59a1-472f-83e9-23f55d53f354-log-httpd\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.137048 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.137086 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-scripts\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.137045 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/efecc023-59a1-472f-83e9-23f55d53f354-config-data\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.154662 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p256\" (UniqueName: \"kubernetes.io/projected/efecc023-59a1-472f-83e9-23f55d53f354-kube-api-access-6p256\") pod \"ceilometer-0\" (UID: \"efecc023-59a1-472f-83e9-23f55d53f354\") " pod="openstack/ceilometer-0" Sep 30 21:02:06 crc kubenswrapper[4919]: I0930 21:02:06.273152 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 30 21:02:07 crc kubenswrapper[4919]: I0930 21:02:07.538092 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 30 21:02:07 crc kubenswrapper[4919]: I0930 21:02:07.643628 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bfa6a29-fbaa-43d1-9d10-dcd440f5a367" path="/var/lib/kubelet/pods/7bfa6a29-fbaa-43d1-9d10-dcd440f5a367/volumes" Sep 30 21:02:07 crc kubenswrapper[4919]: I0930 21:02:07.874372 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efecc023-59a1-472f-83e9-23f55d53f354","Type":"ContainerStarted","Data":"3c46d24c3c3ca446b9232c0e44b6bc69ed27be30cfe5b7607cce5d8b1e5a1c7a"} Sep 30 21:02:07 crc kubenswrapper[4919]: I0930 21:02:07.877668 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"af3fb66e-cbac-480c-b048-12f8bf6c2013","Type":"ContainerStarted","Data":"38b50b1953d73f531fa3c1be10ece349bd46be0140ed16814ff219bf58a55643"} Sep 30 21:02:07 crc kubenswrapper[4919]: I0930 21:02:07.913902 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=3.159172563 podStartE2EDuration="33.913880843s" podCreationTimestamp="2025-09-30 21:01:34 +0000 UTC" firstStartedPulling="2025-09-30 21:01:36.36593618 +0000 UTC m=+2881.481969307" lastFinishedPulling="2025-09-30 21:02:07.12064446 +0000 UTC m=+2912.236677587" observedRunningTime="2025-09-30 21:02:07.903443631 +0000 UTC m=+2913.019476768" watchObservedRunningTime="2025-09-30 21:02:07.913880843 +0000 UTC m=+2913.029913970" Sep 30 21:02:08 crc kubenswrapper[4919]: I0930 21:02:08.303030 4919 prober.go:107] "Probe 
failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:08 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:08 crc kubenswrapper[4919]: > Sep 30 21:02:08 crc kubenswrapper[4919]: I0930 21:02:08.303414 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-proc-0" Sep 30 21:02:08 crc kubenswrapper[4919]: I0930 21:02:08.304026 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cloudkitty-proc" containerStatusID={"Type":"cri-o","ID":"c5df836c6de7c277fef3fb5511ad9c07fa0430efc40eeb3950992e973ff1533d"} pod="openstack/cloudkitty-proc-0" containerMessage="Container cloudkitty-proc failed liveness probe, will be restarted" Sep 30 21:02:08 crc kubenswrapper[4919]: I0930 21:02:08.304085 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" containerID="cri-o://c5df836c6de7c277fef3fb5511ad9c07fa0430efc40eeb3950992e973ff1533d" gracePeriod=30 Sep 30 21:02:08 crc kubenswrapper[4919]: I0930 21:02:08.632818 4919 scope.go:117] "RemoveContainer" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23" Sep 30 21:02:08 crc kubenswrapper[4919]: E0930 21:02:08.633270 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:02:08 crc kubenswrapper[4919]: I0930 21:02:08.888193 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efecc023-59a1-472f-83e9-23f55d53f354","Type":"ContainerStarted","Data":"6b4627faa5328bb843d2f4dd03f985ef935d4cd233dcbb8174fefd70e3052981"} Sep 30 21:02:08 crc kubenswrapper[4919]: I0930 21:02:08.889832 4919 generic.go:334] "Generic (PLEG): container finished" podID="983be098-678e-4ecb-a684-7874ae171f14" containerID="c5df836c6de7c277fef3fb5511ad9c07fa0430efc40eeb3950992e973ff1533d" exitCode=0 Sep 30 21:02:08 crc kubenswrapper[4919]: I0930 21:02:08.889929 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerDied","Data":"c5df836c6de7c277fef3fb5511ad9c07fa0430efc40eeb3950992e973ff1533d"} Sep 30 21:02:09 crc kubenswrapper[4919]: I0930 21:02:09.423492 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:02:09 crc kubenswrapper[4919]: I0930 21:02:09.424556 4919 scope.go:117] "RemoveContainer" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23" Sep 30 21:02:09 crc kubenswrapper[4919]: E0930 21:02:09.424856 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:02:09 crc 
kubenswrapper[4919]: I0930 21:02:09.591998 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:02:09 crc kubenswrapper[4919]: I0930 21:02:09.593136 4919 scope.go:117] "RemoveContainer" containerID="96d1912bebbb471f87d9d0fb4434d899c6413ca7edf420dd2c86c578932751ee" Sep 30 21:02:09 crc kubenswrapper[4919]: E0930 21:02:09.593449 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:02:09 crc kubenswrapper[4919]: I0930 21:02:09.902445 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"3eef72cd2c3b2f8a698f3220b419b80a299b944aeb4f26e1b31922553ab97ce0"} Sep 30 21:02:09 crc kubenswrapper[4919]: I0930 21:02:09.904277 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efecc023-59a1-472f-83e9-23f55d53f354","Type":"ContainerStarted","Data":"7944c15ea9992ab4d719c1182c8d15f670419dedc23cc03ae995e5528db901a7"} Sep 30 21:02:10 crc kubenswrapper[4919]: I0930 21:02:10.849906 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25ld5/crc-debug-x28v7"] Sep 30 21:02:10 crc kubenswrapper[4919]: I0930 21:02:10.852207 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:02:10 crc kubenswrapper[4919]: I0930 21:02:10.873814 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Sep 30 21:02:10 crc kubenswrapper[4919]: I0930 21:02:10.914506 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efecc023-59a1-472f-83e9-23f55d53f354","Type":"ContainerStarted","Data":"0d60df5ab966a251e23735696644d979e1375292191973a204f10e72e30d01b2"} Sep 30 21:02:10 crc kubenswrapper[4919]: I0930 21:02:10.961055 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b25ec85c-a544-4c2e-9e16-042c3632e174-host\") pod \"crc-debug-x28v7\" (UID: \"b25ec85c-a544-4c2e-9e16-042c3632e174\") " pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:02:10 crc kubenswrapper[4919]: I0930 21:02:10.961439 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rktnz\" (UniqueName: \"kubernetes.io/projected/b25ec85c-a544-4c2e-9e16-042c3632e174-kube-api-access-rktnz\") pod \"crc-debug-x28v7\" (UID: \"b25ec85c-a544-4c2e-9e16-042c3632e174\") " pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.062979 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b25ec85c-a544-4c2e-9e16-042c3632e174-host\") pod \"crc-debug-x28v7\" (UID: \"b25ec85c-a544-4c2e-9e16-042c3632e174\") " pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.063082 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-rktnz\" (UniqueName: \"kubernetes.io/projected/b25ec85c-a544-4c2e-9e16-042c3632e174-kube-api-access-rktnz\") pod \"crc-debug-x28v7\" (UID: \"b25ec85c-a544-4c2e-9e16-042c3632e174\") " pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.063186 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b25ec85c-a544-4c2e-9e16-042c3632e174-host\") pod \"crc-debug-x28v7\" (UID: \"b25ec85c-a544-4c2e-9e16-042c3632e174\") " pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.092837 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rktnz\" (UniqueName: \"kubernetes.io/projected/b25ec85c-a544-4c2e-9e16-042c3632e174-kube-api-access-rktnz\") pod \"crc-debug-x28v7\" (UID: \"b25ec85c-a544-4c2e-9e16-042c3632e174\") " pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.194451 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:02:11 crc kubenswrapper[4919]: W0930 21:02:11.228865 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb25ec85c_a544_4c2e_9e16_042c3632e174.slice/crio-20445499d822bc6f2f1b5bb08d801128de62a92f028436e5b5bf16a113beada5 WatchSource:0}: Error finding container 20445499d822bc6f2f1b5bb08d801128de62a92f028436e5b5bf16a113beada5: Status 404 returned error can't find the container with id 20445499d822bc6f2f1b5bb08d801128de62a92f028436e5b5bf16a113beada5 Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.412249 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:11 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:11 crc kubenswrapper[4919]: > Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.925412 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/crc-debug-x28v7" event={"ID":"b25ec85c-a544-4c2e-9e16-042c3632e174","Type":"ContainerStarted","Data":"20445499d822bc6f2f1b5bb08d801128de62a92f028436e5b5bf16a113beada5"} Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.930363 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"efecc023-59a1-472f-83e9-23f55d53f354","Type":"ContainerStarted","Data":"138b16e21ccbeca06c0f4a2006dd9c5f7ce8651ded3c79d0d796c175263613d0"} Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.930756 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 30 21:02:11 crc kubenswrapper[4919]: I0930 21:02:11.951682 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.851451338 podStartE2EDuration="6.951657838s" podCreationTimestamp="2025-09-30 21:02:05 +0000 UTC" firstStartedPulling="2025-09-30 21:02:07.553158294 +0000 UTC m=+2912.669191421" lastFinishedPulling="2025-09-30 21:02:10.653364794 +0000 UTC m=+2915.769397921" observedRunningTime="2025-09-30 21:02:11.946047015 +0000 UTC m=+2917.062080152" watchObservedRunningTime="2025-09-30 21:02:11.951657838 +0000 UTC m=+2917.067690965" Sep 30 21:02:14 crc kubenswrapper[4919]: I0930 21:02:14.337494 
4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:14 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:14 crc kubenswrapper[4919]: > Sep 30 21:02:17 crc kubenswrapper[4919]: I0930 21:02:17.356428 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:17 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:17 crc kubenswrapper[4919]: > Sep 30 21:02:17 crc kubenswrapper[4919]: I0930 21:02:17.356779 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-proc-0" Sep 30 21:02:17 crc kubenswrapper[4919]: I0930 21:02:17.357584 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cloudkitty-proc" containerStatusID={"Type":"cri-o","ID":"3eef72cd2c3b2f8a698f3220b419b80a299b944aeb4f26e1b31922553ab97ce0"} pod="openstack/cloudkitty-proc-0" containerMessage="Container cloudkitty-proc failed liveness probe, will be restarted" Sep 30 21:02:17 crc kubenswrapper[4919]: I0930 21:02:17.357615 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" containerID="cri-o://3eef72cd2c3b2f8a698f3220b419b80a299b944aeb4f26e1b31922553ab97ce0" gracePeriod=30 Sep 30 21:02:17 crc kubenswrapper[4919]: I0930 21:02:17.994000 4919 generic.go:334] "Generic (PLEG): container finished" podID="983be098-678e-4ecb-a684-7874ae171f14" containerID="3eef72cd2c3b2f8a698f3220b419b80a299b944aeb4f26e1b31922553ab97ce0" exitCode=0 Sep 30 21:02:17 crc kubenswrapper[4919]: I0930 21:02:17.994068 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerDied","Data":"3eef72cd2c3b2f8a698f3220b419b80a299b944aeb4f26e1b31922553ab97ce0"} Sep 30 21:02:17 crc kubenswrapper[4919]: I0930 21:02:17.994394 4919 scope.go:117] "RemoveContainer" containerID="c5df836c6de7c277fef3fb5511ad9c07fa0430efc40eeb3950992e973ff1533d" Sep 30 21:02:20 crc kubenswrapper[4919]: I0930 21:02:20.873927 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Sep 30 21:02:20 crc kubenswrapper[4919]: I0930 21:02:20.876575 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Sep 30 21:02:21 crc kubenswrapper[4919]: I0930 21:02:21.030664 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Sep 30 21:02:21 crc kubenswrapper[4919]: I0930 21:02:21.632332 4919 scope.go:117] "RemoveContainer" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23" Sep 30 21:02:21 crc kubenswrapper[4919]: E0930 21:02:21.632931 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:02:24 crc 
kubenswrapper[4919]: I0930 21:02:24.062677 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/crc-debug-x28v7" event={"ID":"b25ec85c-a544-4c2e-9e16-042c3632e174","Type":"ContainerStarted","Data":"bb6f4e140344374c5dec36c318fbfbe7fd58f4dd9c2623937391563f14bdc2dd"} Sep 30 21:02:24 crc kubenswrapper[4919]: I0930 21:02:24.064930 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"0a58f4082e44714cb74f60fd877740a46a70c102ad5088c8258554b2d44a353c"} Sep 30 21:02:24 crc kubenswrapper[4919]: I0930 21:02:24.087622 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-25ld5/crc-debug-x28v7" podStartSLOduration=2.368297863 podStartE2EDuration="14.087600217s" podCreationTimestamp="2025-09-30 21:02:10 +0000 UTC" firstStartedPulling="2025-09-30 21:02:11.231114056 +0000 UTC m=+2916.347147183" lastFinishedPulling="2025-09-30 21:02:22.95041641 +0000 UTC m=+2928.066449537" observedRunningTime="2025-09-30 21:02:24.075585599 +0000 UTC m=+2929.191618726" watchObservedRunningTime="2025-09-30 21:02:24.087600217 +0000 UTC m=+2929.203633344" Sep 30 21:02:24 crc kubenswrapper[4919]: I0930 21:02:24.632172 4919 scope.go:117] "RemoveContainer" containerID="96d1912bebbb471f87d9d0fb4434d899c6413ca7edf420dd2c86c578932751ee" Sep 30 21:02:25 crc kubenswrapper[4919]: I0930 21:02:25.075543 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerStarted","Data":"60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e"} Sep 30 21:02:25 crc kubenswrapper[4919]: I0930 21:02:25.076301 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:02:25 crc kubenswrapper[4919]: I0930 21:02:25.078005 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" containerName="gateway" probeResult="failure" output="Get \"https://10.217.0.252:8081/ready\": dial tcp 10.217.0.252:8081: connect: connection refused" Sep 30 21:02:25 crc kubenswrapper[4919]: I0930 21:02:25.097514 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podStartSLOduration=100.691645162 podStartE2EDuration="1m46.097499233s" podCreationTimestamp="2025-09-30 21:00:39 +0000 UTC" firstStartedPulling="2025-09-30 21:00:40.330722588 +0000 UTC m=+2825.446755715" lastFinishedPulling="2025-09-30 21:00:45.736576659 +0000 UTC m=+2830.852609786" observedRunningTime="2025-09-30 21:02:25.093771325 +0000 UTC m=+2930.209804452" watchObservedRunningTime="2025-09-30 21:02:25.097499233 +0000 UTC m=+2930.213532360" Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.062247 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.062780 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.062826 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.063554 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5"} pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.063603 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" containerID="cri-o://becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" gracePeriod=600 Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.101658 4919 generic.go:334] "Generic (PLEG): container finished" podID="0dde18df-d1bd-4b36-82af-cd0967cd942b" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" exitCode=2 Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.101708 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerDied","Data":"60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e"} Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.101746 4919 scope.go:117] "RemoveContainer" containerID="96d1912bebbb471f87d9d0fb4434d899c6413ca7edf420dd2c86c578932751ee" Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.102417 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:02:26 crc kubenswrapper[4919]: E0930 21:02:26.102720 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:02:26 crc kubenswrapper[4919]: I0930 21:02:26.441739 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:26 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:26 crc kubenswrapper[4919]: > Sep 30 21:02:26 crc kubenswrapper[4919]: E0930 21:02:26.752791 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:02:27 crc kubenswrapper[4919]: I0930 21:02:27.117054 4919 generic.go:334] "Generic 
(PLEG): container finished" podID="eb371a63-6d82-453e-930e-656710b97f10" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" exitCode=0 Sep 30 21:02:27 crc kubenswrapper[4919]: I0930 21:02:27.117166 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerDied","Data":"becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5"} Sep 30 21:02:27 crc kubenswrapper[4919]: I0930 21:02:27.117252 4919 scope.go:117] "RemoveContainer" containerID="e80e39ff8cf4c5d798c0a77b763a10876f1d8ee226789c307d8c40f2aedc19fa" Sep 30 21:02:27 crc kubenswrapper[4919]: I0930 21:02:27.118135 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:02:27 crc kubenswrapper[4919]: E0930 21:02:27.118686 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:02:27 crc kubenswrapper[4919]: I0930 21:02:27.124022 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:02:27 crc kubenswrapper[4919]: E0930 21:02:27.124595 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:02:27 crc kubenswrapper[4919]: I0930 21:02:27.618205 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Sep 30 21:02:29 crc kubenswrapper[4919]: I0930 21:02:29.307196 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:29 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:29 crc kubenswrapper[4919]: > Sep 30 21:02:32 crc kubenswrapper[4919]: I0930 21:02:32.400515 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:32 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:32 crc kubenswrapper[4919]: > Sep 30 21:02:32 crc kubenswrapper[4919]: I0930 21:02:32.401158 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-proc-0" Sep 30 21:02:32 crc kubenswrapper[4919]: I0930 21:02:32.401961 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cloudkitty-proc" containerStatusID={"Type":"cri-o","ID":"0a58f4082e44714cb74f60fd877740a46a70c102ad5088c8258554b2d44a353c"} pod="openstack/cloudkitty-proc-0" containerMessage="Container cloudkitty-proc failed liveness probe, will be restarted" Sep 30 21:02:32 crc kubenswrapper[4919]: I0930 21:02:32.401987 4919 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" containerID="cri-o://0a58f4082e44714cb74f60fd877740a46a70c102ad5088c8258554b2d44a353c" gracePeriod=30 Sep 30 21:02:32 crc kubenswrapper[4919]: I0930 21:02:32.632393 4919 scope.go:117] "RemoveContainer" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23" Sep 30 21:02:32 crc kubenswrapper[4919]: E0930 21:02:32.991663 4919 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod983be098_678e_4ecb_a684_7874ae171f14.slice/crio-conmon-0a58f4082e44714cb74f60fd877740a46a70c102ad5088c8258554b2d44a353c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod983be098_678e_4ecb_a684_7874ae171f14.slice/crio-0a58f4082e44714cb74f60fd877740a46a70c102ad5088c8258554b2d44a353c.scope\": RecentStats: unable to find data in memory cache]" Sep 30 21:02:33 crc kubenswrapper[4919]: I0930 21:02:33.187287 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerStarted","Data":"882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4"} Sep 30 21:02:33 crc kubenswrapper[4919]: I0930 21:02:33.187879 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:02:33 crc kubenswrapper[4919]: I0930 21:02:33.189175 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerName="gateway" probeResult="failure" output="Get \"https://10.217.0.251:8081/ready\": dial tcp 10.217.0.251:8081: connect: connection refused" Sep 30 21:02:33 crc kubenswrapper[4919]: I0930 21:02:33.192188 4919 generic.go:334] "Generic (PLEG): container finished" podID="983be098-678e-4ecb-a684-7874ae171f14" containerID="0a58f4082e44714cb74f60fd877740a46a70c102ad5088c8258554b2d44a353c" exitCode=0 Sep 30 21:02:33 crc kubenswrapper[4919]: I0930 21:02:33.192246 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerDied","Data":"0a58f4082e44714cb74f60fd877740a46a70c102ad5088c8258554b2d44a353c"} Sep 30 21:02:33 crc kubenswrapper[4919]: I0930 21:02:33.192305 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d"} Sep 30 21:02:33 crc kubenswrapper[4919]: I0930 21:02:33.192433 4919 scope.go:117] "RemoveContainer" containerID="3eef72cd2c3b2f8a698f3220b419b80a299b944aeb4f26e1b31922553ab97ce0" Sep 30 21:02:34 crc kubenswrapper[4919]: I0930 21:02:34.204815 4919 generic.go:334] "Generic (PLEG): container finished" podID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" exitCode=2 Sep 30 21:02:34 crc kubenswrapper[4919]: I0930 21:02:34.204869 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" 
event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerDied","Data":"882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4"} Sep 30 21:02:34 crc kubenswrapper[4919]: I0930 21:02:34.205468 4919 scope.go:117] "RemoveContainer" containerID="bede75dc9e0058320ead34f6aee078da7746241a1554047cabe89f025ee99b23" Sep 30 21:02:34 crc kubenswrapper[4919]: I0930 21:02:34.205624 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:02:34 crc kubenswrapper[4919]: E0930 21:02:34.205912 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:02:35 crc kubenswrapper[4919]: I0930 21:02:35.233512 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:02:35 crc kubenswrapper[4919]: E0930 21:02:35.234717 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:02:36 crc kubenswrapper[4919]: I0930 21:02:36.276846 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 30 21:02:37 crc kubenswrapper[4919]: I0930 21:02:37.632806 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:02:37 crc kubenswrapper[4919]: E0930 21:02:37.633678 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:02:38 crc kubenswrapper[4919]: I0930 21:02:38.325749 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:38 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:38 crc kubenswrapper[4919]: > Sep 30 21:02:39 crc kubenswrapper[4919]: I0930 21:02:39.423990 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:02:39 crc kubenswrapper[4919]: I0930 21:02:39.425159 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:02:39 crc kubenswrapper[4919]: E0930 21:02:39.425414 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" 
podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:02:39 crc kubenswrapper[4919]: I0930 21:02:39.592459 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:02:39 crc kubenswrapper[4919]: I0930 21:02:39.593441 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:02:39 crc kubenswrapper[4919]: E0930 21:02:39.593787 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:02:41 crc kubenswrapper[4919]: I0930 21:02:41.403656 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:41 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:41 crc kubenswrapper[4919]: > Sep 30 21:02:41 crc kubenswrapper[4919]: I0930 21:02:41.633596 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:02:41 crc kubenswrapper[4919]: E0930 21:02:41.633954 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:02:44 crc kubenswrapper[4919]: I0930 21:02:44.348853 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:02:44 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:02:44 crc kubenswrapper[4919]: > Sep 30 21:02:44 crc kubenswrapper[4919]: I0930 21:02:44.349240 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-proc-0" Sep 30 21:02:44 crc kubenswrapper[4919]: I0930 21:02:44.350010 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cloudkitty-proc" containerStatusID={"Type":"cri-o","ID":"6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d"} pod="openstack/cloudkitty-proc-0" containerMessage="Container cloudkitty-proc failed liveness probe, will be restarted" Sep 30 21:02:44 crc kubenswrapper[4919]: I0930 21:02:44.350046 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" containerID="cri-o://6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d" gracePeriod=30 Sep 30 21:02:44 crc kubenswrapper[4919]: E0930 21:02:44.795851 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" 
pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:02:45 crc kubenswrapper[4919]: I0930 21:02:45.347087 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerDied","Data":"6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d"} Sep 30 21:02:45 crc kubenswrapper[4919]: I0930 21:02:45.347139 4919 scope.go:117] "RemoveContainer" containerID="0a58f4082e44714cb74f60fd877740a46a70c102ad5088c8258554b2d44a353c" Sep 30 21:02:45 crc kubenswrapper[4919]: I0930 21:02:45.347050 4919 generic.go:334] "Generic (PLEG): container finished" podID="983be098-678e-4ecb-a684-7874ae171f14" containerID="6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d" exitCode=0 Sep 30 21:02:45 crc kubenswrapper[4919]: I0930 21:02:45.352434 4919 scope.go:117] "RemoveContainer" containerID="6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d" Sep 30 21:02:45 crc kubenswrapper[4919]: E0930 21:02:45.352810 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:02:53 crc kubenswrapper[4919]: I0930 21:02:53.632302 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:02:53 crc kubenswrapper[4919]: E0930 21:02:53.633157 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:02:54 crc kubenswrapper[4919]: I0930 21:02:54.632853 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:02:54 crc kubenswrapper[4919]: I0930 21:02:54.634377 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:02:54 crc kubenswrapper[4919]: E0930 21:02:54.634655 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:02:54 crc kubenswrapper[4919]: E0930 21:02:54.634661 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:03:00 crc kubenswrapper[4919]: I0930 21:03:00.632497 4919 scope.go:117] "RemoveContainer" containerID="6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d" Sep 30 21:03:00 crc 
kubenswrapper[4919]: E0930 21:03:00.633417 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:03:07 crc kubenswrapper[4919]: I0930 21:03:07.635289 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:03:07 crc kubenswrapper[4919]: E0930 21:03:07.636597 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:03:07 crc kubenswrapper[4919]: I0930 21:03:07.649801 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:03:07 crc kubenswrapper[4919]: E0930 21:03:07.650273 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:03:08 crc kubenswrapper[4919]: I0930 21:03:08.633544 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:03:08 crc kubenswrapper[4919]: E0930 21:03:08.633773 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:03:13 crc kubenswrapper[4919]: I0930 21:03:13.632236 4919 scope.go:117] "RemoveContainer" containerID="6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d" Sep 30 21:03:14 crc kubenswrapper[4919]: I0930 21:03:14.723166 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249"} Sep 30 21:03:17 crc kubenswrapper[4919]: I0930 21:03:17.365031 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:03:17 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:03:17 crc kubenswrapper[4919]: > Sep 30 21:03:18 crc kubenswrapper[4919]: I0930 21:03:18.633201 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:03:18 crc kubenswrapper[4919]: E0930 21:03:18.633684 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with 
CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:03:19 crc kubenswrapper[4919]: I0930 21:03:19.632347 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:03:19 crc kubenswrapper[4919]: E0930 21:03:19.632839 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:03:20 crc kubenswrapper[4919]: I0930 21:03:20.417323 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:03:20 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:03:20 crc kubenswrapper[4919]: > Sep 30 21:03:23 crc kubenswrapper[4919]: I0930 21:03:23.360564 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:03:23 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:03:23 crc kubenswrapper[4919]: > Sep 30 21:03:23 crc kubenswrapper[4919]: I0930 21:03:23.361126 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-proc-0" Sep 30 21:03:23 crc kubenswrapper[4919]: I0930 21:03:23.361893 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cloudkitty-proc" containerStatusID={"Type":"cri-o","ID":"600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249"} pod="openstack/cloudkitty-proc-0" containerMessage="Container cloudkitty-proc failed liveness probe, will be restarted" Sep 30 21:03:23 crc kubenswrapper[4919]: I0930 21:03:23.361920 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" containerID="cri-o://600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249" gracePeriod=30 Sep 30 21:03:23 crc kubenswrapper[4919]: I0930 21:03:23.634531 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:03:23 crc kubenswrapper[4919]: E0930 21:03:23.634927 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:03:23 crc kubenswrapper[4919]: E0930 21:03:23.907773 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 40s restarting failed container=cloudkitty-proc 
pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:03:24 crc kubenswrapper[4919]: I0930 21:03:24.855573 4919 generic.go:334] "Generic (PLEG): container finished" podID="983be098-678e-4ecb-a684-7874ae171f14" containerID="600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249" exitCode=0 Sep 30 21:03:24 crc kubenswrapper[4919]: I0930 21:03:24.855613 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerDied","Data":"600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249"} Sep 30 21:03:24 crc kubenswrapper[4919]: I0930 21:03:24.855643 4919 scope.go:117] "RemoveContainer" containerID="6fdca781f1af682bc4fbaa88445c65f659797fd92b94a46adf4300f3dff3c28d" Sep 30 21:03:24 crc kubenswrapper[4919]: I0930 21:03:24.856268 4919 scope.go:117] "RemoveContainer" containerID="600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249" Sep 30 21:03:24 crc kubenswrapper[4919]: E0930 21:03:24.856489 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:03:27 crc kubenswrapper[4919]: I0930 21:03:27.612984 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_df15c633-42da-4bd4-8d99-042064dcb9cf/init-config-reloader/0.log" Sep 30 21:03:27 crc kubenswrapper[4919]: I0930 21:03:27.779288 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_df15c633-42da-4bd4-8d99-042064dcb9cf/init-config-reloader/0.log" Sep 30 21:03:27 crc kubenswrapper[4919]: I0930 21:03:27.810560 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_df15c633-42da-4bd4-8d99-042064dcb9cf/config-reloader/0.log" Sep 30 21:03:27 crc kubenswrapper[4919]: I0930 21:03:27.810911 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_df15c633-42da-4bd4-8d99-042064dcb9cf/alertmanager/0.log" Sep 30 21:03:27 crc kubenswrapper[4919]: I0930 21:03:27.966736 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-ff644977d-6mdtd_685bc25b-e005-4a7c-933b-87fd1b925709/barbican-api/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.025422 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-ff644977d-6mdtd_685bc25b-e005-4a7c-933b-87fd1b925709/barbican-api-log/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.197137 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6dbbb7bcf8-57zt6_32220ef4-7a02-469d-8d56-fd48736838e0/barbican-keystone-listener/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.227948 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6dbbb7bcf8-57zt6_32220ef4-7a02-469d-8d56-fd48736838e0/barbican-keystone-listener-log/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.376811 4919 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-worker-7f8946bc95-rbsd8_0ffb951e-fda6-4079-ba13-02ddbd2ab58f/barbican-worker/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.415415 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f8946bc95-rbsd8_0ffb951e-fda6-4079-ba13-02ddbd2ab58f/barbican-worker-log/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.648757 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-x5dmg_8ed94f60-cd6c-4559-879b-de97554383c6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.817335 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_efecc023-59a1-472f-83e9-23f55d53f354/ceilometer-central-agent/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.867666 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_efecc023-59a1-472f-83e9-23f55d53f354/ceilometer-notification-agent/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.879158 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_efecc023-59a1-472f-83e9-23f55d53f354/proxy-httpd/0.log" Sep 30 21:03:28 crc kubenswrapper[4919]: I0930 21:03:28.989301 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_efecc023-59a1-472f-83e9-23f55d53f354/sg-core/0.log" Sep 30 21:03:29 crc kubenswrapper[4919]: I0930 21:03:29.109717 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2326593c-681c-435f-85bc-126dfddc85a4/cinder-api/0.log" Sep 30 21:03:29 crc kubenswrapper[4919]: I0930 21:03:29.224814 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_2326593c-681c-435f-85bc-126dfddc85a4/cinder-api-log/0.log" Sep 30 21:03:29 crc kubenswrapper[4919]: I0930 21:03:29.319857 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0207f9a6-d481-4499-81a2-7e9bbaba9000/cinder-scheduler/0.log" Sep 30 21:03:29 crc kubenswrapper[4919]: I0930 21:03:29.418023 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0207f9a6-d481-4499-81a2-7e9bbaba9000/probe/0.log" Sep 30 21:03:29 crc kubenswrapper[4919]: I0930 21:03:29.502051 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_72286461-f967-4cb3-8ed4-6387ad420acb/cloudkitty-api/0.log" Sep 30 21:03:29 crc kubenswrapper[4919]: I0930 21:03:29.615862 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_72286461-f967-4cb3-8ed4-6387ad420acb/cloudkitty-api-log/0.log" Sep 30 21:03:29 crc kubenswrapper[4919]: I0930 21:03:29.690846 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-db-create-pc5n6_d40b718b-eccc-4eb3-b782-b7f1e68b47ca/mariadb-database-create/0.log" Sep 30 21:03:29 crc kubenswrapper[4919]: I0930 21:03:29.887277 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-db-sync-dw2jq_f781d002-f3f5-43e1-863c-ceb1fd87ec79/cloudkitty-db-sync/0.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.015973 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-fb7a-account-create-js49h_c3f9fc61-e4ed-4943-a0a3-f152aa21d724/mariadb-account-create/0.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.171769 4919 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_0c6a0e4a-b52a-4312-a36e-94c6f709200a/loki-compactor/0.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.303732 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-bccccd5f6-zhbb7_182374fe-7fd0-4267-b938-396ef9eabd7f/loki-distributor/0.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.387184 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-89dc74b89-6jf89_53a8fa4c-0ba7-4e41-86e5-e4e767126bc3/gateway/4.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.469614 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-89dc74b89-6jf89_53a8fa4c-0ba7-4e41-86e5-e4e767126bc3/gateway/4.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.558231 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-89dc74b89-bs96r_0dde18df-d1bd-4b36-82af-cd0967cd942b/gateway/4.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.560964 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-89dc74b89-bs96r_0dde18df-d1bd-4b36-82af-cd0967cd942b/gateway/4.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.730752 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_871d1c5f-b69f-44c9-88c0-f72c11d61eb5/loki-index-gateway/0.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.763715 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_85f27421-a520-4043-b8d5-7729b07a0bed/loki-ingester/0.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.975156 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-6b6cdc96db-9xqzp_300e57fc-d2df-468a-8c02-0bff21cd53c1/loki-querier/0.log" Sep 30 21:03:30 crc kubenswrapper[4919]: I0930 21:03:30.991843 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-7c7846d94c-ml8vd_8a614d9b-d891-48aa-9a64-d6b5187a8f73/loki-query-frontend/0.log" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.167791 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_983be098-678e-4ecb-a684-7874ae171f14/cloudkitty-proc/4.log" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.205198 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_983be098-678e-4ecb-a684-7874ae171f14/cloudkitty-proc/4.log" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.427459 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-storageinit-gnq6c_b4df6b31-fbe9-43f8-ac28-3d5933009f2b/cloudkitty-storageinit/0.log" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.438200 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-6gdhf_27370bb4-04b4-4f01-b60d-e45c208a51a0/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.592636 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-4vfng_998048ab-cfdd-4179-a40e-345b0d8a792d/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:31 crc kubenswrapper[4919]: 
I0930 21:03:31.633000 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:03:31 crc kubenswrapper[4919]: E0930 21:03:31.633198 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.720045 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-s8n78_18129bcf-0bdc-4437-83de-b9e5f20c66d3/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.811273 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-lh6l6_0329240c-67c8-4d59-97ee-17350f696ce2/init/0.log" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.929790 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-lh6l6_0329240c-67c8-4d59-97ee-17350f696ce2/init/0.log" Sep 30 21:03:31 crc kubenswrapper[4919]: I0930 21:03:31.970569 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cb6ffcf87-lh6l6_0329240c-67c8-4d59-97ee-17350f696ce2/dnsmasq-dns/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.044634 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-s4wls_8d811837-df5f-49b4-bd4f-88bf57aa20b4/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.177348 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_bf52a250-e8fb-4bd1-a25e-2852fbfb0804/glance-log/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.184889 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_bf52a250-e8fb-4bd1-a25e-2852fbfb0804/glance-httpd/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.285186 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_a78d2c04-3656-417b-ace2-cba6a7e90060/glance-httpd/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.377718 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_a78d2c04-3656-417b-ace2-cba6a7e90060/glance-log/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.428672 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-khgcj_5f109611-d866-4b48-be9e-2b8296544fcb/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.607391 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-tsxkh_06ab2a0e-429a-46ef-9458-18c15c4142c3/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.716598 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-659c895849-vsrcz_b8f18d42-d7ea-42d1-bbcb-a81afc1b0508/keystone-api/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 
21:03:32.816586 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29321101-km2h6_788fa050-bf94-4ec2-b030-7dc6f5ecfef3/keystone-cron/0.log" Sep 30 21:03:32 crc kubenswrapper[4919]: I0930 21:03:32.923483 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-4qsjp_dba87a6c-f6b5-4ef3-920a-fa94968c3602/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:33 crc kubenswrapper[4919]: I0930 21:03:33.124579 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-db95ddc59-4ffw5_3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0/neutron-api/0.log" Sep 30 21:03:33 crc kubenswrapper[4919]: I0930 21:03:33.202725 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-db95ddc59-4ffw5_3ae684b0-0fe1-48e2-bcf3-1de5b70a1cc0/neutron-httpd/0.log" Sep 30 21:03:33 crc kubenswrapper[4919]: I0930 21:03:33.373856 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-shvrp_64b3c647-b4dd-4f5f-9ddc-001dd913c43c/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:33 crc kubenswrapper[4919]: I0930 21:03:33.632179 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:03:33 crc kubenswrapper[4919]: E0930 21:03:33.632840 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:03:33 crc kubenswrapper[4919]: I0930 21:03:33.692996 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0218baef-cb5d-45c2-8a23-bb06a2887c7b/nova-api-log/0.log" Sep 30 21:03:33 crc kubenswrapper[4919]: I0930 21:03:33.818923 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_0218baef-cb5d-45c2-8a23-bb06a2887c7b/nova-api-api/0.log" Sep 30 21:03:34 crc kubenswrapper[4919]: I0930 21:03:34.014714 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_b8ef3110-c308-43a3-a7b8-18d0f7d50488/nova-cell0-conductor-conductor/0.log" Sep 30 21:03:34 crc kubenswrapper[4919]: I0930 21:03:34.201413 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_6abf178b-97a5-488a-a31e-7af9a7c0a710/nova-cell1-conductor-conductor/0.log" Sep 30 21:03:34 crc kubenswrapper[4919]: I0930 21:03:34.270473 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_ffac99cf-7663-4d4c-a617-2b15d249f07b/nova-cell1-novncproxy-novncproxy/0.log" Sep 30 21:03:34 crc kubenswrapper[4919]: I0930 21:03:34.526616 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-j4ggx_394bf41c-6bf1-40f4-af0c-41ba74713e03/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:34 crc kubenswrapper[4919]: I0930 21:03:34.633923 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:03:34 crc kubenswrapper[4919]: E0930 21:03:34.634527 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:03:34 crc kubenswrapper[4919]: I0930 21:03:34.741396 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5f40f126-0a68-488f-ae68-c56dc4581bd1/nova-metadata-log/0.log" Sep 30 21:03:35 crc kubenswrapper[4919]: I0930 21:03:35.090137 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_4a6ed554-1c0c-4d0e-9506-103517b7b065/nova-scheduler-scheduler/0.log" Sep 30 21:03:35 crc kubenswrapper[4919]: I0930 21:03:35.338964 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f35330fc-f5b9-461f-801e-9ae42bd20866/mysql-bootstrap/0.log" Sep 30 21:03:35 crc kubenswrapper[4919]: I0930 21:03:35.481871 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f35330fc-f5b9-461f-801e-9ae42bd20866/mysql-bootstrap/0.log" Sep 30 21:03:35 crc kubenswrapper[4919]: I0930 21:03:35.571687 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_f35330fc-f5b9-461f-801e-9ae42bd20866/galera/0.log" Sep 30 21:03:35 crc kubenswrapper[4919]: I0930 21:03:35.743916 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_5f40f126-0a68-488f-ae68-c56dc4581bd1/nova-metadata-metadata/0.log" Sep 30 21:03:35 crc kubenswrapper[4919]: I0930 21:03:35.788736 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_d5e405b0-b5a3-4313-8fd2-b592b38e5926/mysql-bootstrap/0.log" Sep 30 21:03:35 crc kubenswrapper[4919]: I0930 21:03:35.983426 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_d5e405b0-b5a3-4313-8fd2-b592b38e5926/mysql-bootstrap/0.log" Sep 30 21:03:36 crc kubenswrapper[4919]: I0930 21:03:36.036167 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_d5e405b0-b5a3-4313-8fd2-b592b38e5926/galera/0.log" Sep 30 21:03:36 crc kubenswrapper[4919]: I0930 21:03:36.196655 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_e3a2eec4-8bce-4afb-8eb9-e57417515312/openstackclient/0.log" Sep 30 21:03:36 crc kubenswrapper[4919]: I0930 21:03:36.305135 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jx7tr_8b4d1dc0-4d24-4128-a83b-9f37e7356309/ovn-controller/0.log" Sep 30 21:03:36 crc kubenswrapper[4919]: I0930 21:03:36.551986 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-77ggv_a3c9988e-e6d9-4986-bd84-8cd3f8c7f6ea/openstack-network-exporter/0.log" Sep 30 21:03:36 crc kubenswrapper[4919]: I0930 21:03:36.632734 4919 scope.go:117] "RemoveContainer" containerID="600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249" Sep 30 21:03:36 crc kubenswrapper[4919]: E0930 21:03:36.633166 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:03:36 crc 
kubenswrapper[4919]: I0930 21:03:36.746658 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ppjcf_52b6421a-9e6a-490d-9940-b2931f34aae1/ovsdb-server-init/0.log" Sep 30 21:03:36 crc kubenswrapper[4919]: I0930 21:03:36.910477 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ppjcf_52b6421a-9e6a-490d-9940-b2931f34aae1/ovsdb-server-init/0.log" Sep 30 21:03:36 crc kubenswrapper[4919]: I0930 21:03:36.932607 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ppjcf_52b6421a-9e6a-490d-9940-b2931f34aae1/ovs-vswitchd/0.log" Sep 30 21:03:36 crc kubenswrapper[4919]: I0930 21:03:36.992353 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ppjcf_52b6421a-9e6a-490d-9940-b2931f34aae1/ovsdb-server/0.log" Sep 30 21:03:37 crc kubenswrapper[4919]: I0930 21:03:37.153587 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-q2khk_169a1e1c-15ca-4930-942f-48ac6a92d964/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:37 crc kubenswrapper[4919]: I0930 21:03:37.365641 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_fa074105-b5b2-44de-b6f1-5c62086574e4/openstack-network-exporter/0.log" Sep 30 21:03:37 crc kubenswrapper[4919]: I0930 21:03:37.421181 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_fa074105-b5b2-44de-b6f1-5c62086574e4/ovn-northd/0.log" Sep 30 21:03:37 crc kubenswrapper[4919]: I0930 21:03:37.544349 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87cf8edf-c133-4a62-939f-72dc079db17e/openstack-network-exporter/0.log" Sep 30 21:03:37 crc kubenswrapper[4919]: I0930 21:03:37.607627 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_87cf8edf-c133-4a62-939f-72dc079db17e/ovsdbserver-nb/0.log" Sep 30 21:03:37 crc kubenswrapper[4919]: I0930 21:03:37.741939 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3/openstack-network-exporter/0.log" Sep 30 21:03:37 crc kubenswrapper[4919]: I0930 21:03:37.809043 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_5b354ea4-a8cd-4c32-aa58-4e5e5d56a1e3/ovsdbserver-sb/0.log" Sep 30 21:03:37 crc kubenswrapper[4919]: I0930 21:03:37.936690 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6f9cd6fc64-z8qnp_7e3e697a-4e47-48d5-a3f9-ae4d4a772f60/placement-api/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.089046 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6f9cd6fc64-z8qnp_7e3e697a-4e47-48d5-a3f9-ae4d4a772f60/placement-log/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.176909 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_af3fb66e-cbac-480c-b048-12f8bf6c2013/init-config-reloader/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.358848 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_af3fb66e-cbac-480c-b048-12f8bf6c2013/config-reloader/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.369896 4919 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_prometheus-metric-storage-0_af3fb66e-cbac-480c-b048-12f8bf6c2013/init-config-reloader/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.445038 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_af3fb66e-cbac-480c-b048-12f8bf6c2013/prometheus/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.647157 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_743c3f7e-1714-48ce-85ba-bf201f5b1c8c/setup-container/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.661893 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_af3fb66e-cbac-480c-b048-12f8bf6c2013/thanos-sidecar/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.871003 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_743c3f7e-1714-48ce-85ba-bf201f5b1c8c/rabbitmq/0.log" Sep 30 21:03:38 crc kubenswrapper[4919]: I0930 21:03:38.883145 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_743c3f7e-1714-48ce-85ba-bf201f5b1c8c/setup-container/0.log" Sep 30 21:03:39 crc kubenswrapper[4919]: I0930 21:03:39.390206 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_edc716fe-90af-4fa2-a733-d4c3fc3e76b9/setup-container/0.log" Sep 30 21:03:39 crc kubenswrapper[4919]: I0930 21:03:39.593684 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_edc716fe-90af-4fa2-a733-d4c3fc3e76b9/setup-container/0.log" Sep 30 21:03:39 crc kubenswrapper[4919]: I0930 21:03:39.600121 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_edc716fe-90af-4fa2-a733-d4c3fc3e76b9/rabbitmq/0.log" Sep 30 21:03:39 crc kubenswrapper[4919]: I0930 21:03:39.786734 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-qp4k5_4d98ac92-d78b-4df7-82bd-430f274f9ee0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:39 crc kubenswrapper[4919]: I0930 21:03:39.915143 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-5vq8x_99e35059-a993-4792-bad5-4bfb1615a04f/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:40 crc kubenswrapper[4919]: I0930 21:03:40.075028 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-n452p_73d4ec74-f76d-437b-b91b-dc0e75157be8/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:40 crc kubenswrapper[4919]: I0930 21:03:40.233386 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-c5zkc_d45c6797-70f3-4dd7-ba6b-51e2ea97a0cb/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:40 crc kubenswrapper[4919]: I0930 21:03:40.442596 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-xdv77_ab2683ea-36ab-40a3-9a08-af8c2cc5f3d6/ssh-known-hosts-edpm-deployment/0.log" Sep 30 21:03:40 crc kubenswrapper[4919]: I0930 21:03:40.604677 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-79b575f787-8gljl_371dfa65-4310-40a1-b28c-74f5ec1071fd/proxy-server/0.log" Sep 30 21:03:40 crc kubenswrapper[4919]: I0930 21:03:40.689998 4919 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-proxy-79b575f787-8gljl_371dfa65-4310-40a1-b28c-74f5ec1071fd/proxy-httpd/0.log" Sep 30 21:03:40 crc kubenswrapper[4919]: I0930 21:03:40.839366 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-h6ch2_c09b8ac9-d600-48b6-9ebc-be7b2cbe74ad/swift-ring-rebalance/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.029642 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/account-auditor/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.075047 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/account-reaper/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.142511 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/account-replicator/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.204355 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/account-server/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.267591 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/container-auditor/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.409470 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/container-replicator/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.417427 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/container-server/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.469763 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/container-updater/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.580602 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/object-auditor/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.608707 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/object-expirer/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.683869 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/object-replicator/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.782159 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/object-server/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.799653 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/object-updater/0.log" Sep 30 21:03:41 crc kubenswrapper[4919]: I0930 21:03:41.886053 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/rsync/0.log" Sep 30 21:03:42 crc kubenswrapper[4919]: I0930 21:03:42.008057 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_30462126-2244-47cd-8076-12744196012d/swift-recon-cron/0.log" Sep 30 21:03:42 
crc kubenswrapper[4919]: I0930 21:03:42.151267 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-fnww4_e4e968a1-eb51-4c2e-9672-ff0a6f050948/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:42 crc kubenswrapper[4919]: I0930 21:03:42.303829 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-s6dz4_b0af6183-c25a-420d-968c-73d8341d5547/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 30 21:03:44 crc kubenswrapper[4919]: I0930 21:03:44.631635 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:03:44 crc kubenswrapper[4919]: E0930 21:03:44.632227 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:03:46 crc kubenswrapper[4919]: I0930 21:03:46.166943 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_f5483de2-8939-4696-969b-efa0a56de229/memcached/0.log" Sep 30 21:03:46 crc kubenswrapper[4919]: I0930 21:03:46.632414 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:03:47 crc kubenswrapper[4919]: I0930 21:03:47.101038 4919 generic.go:334] "Generic (PLEG): container finished" podID="0dde18df-d1bd-4b36-82af-cd0967cd942b" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" exitCode=2 Sep 30 21:03:47 crc kubenswrapper[4919]: I0930 21:03:47.101125 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerDied","Data":"39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587"} Sep 30 21:03:47 crc kubenswrapper[4919]: I0930 21:03:47.101622 4919 scope.go:117] "RemoveContainer" containerID="60d7cae367ff0f4a3e95446f189b9d9efff7df1bf05a2f201c9ec0d14de08e8e" Sep 30 21:03:47 crc kubenswrapper[4919]: I0930 21:03:47.102402 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:03:47 crc kubenswrapper[4919]: E0930 21:03:47.102795 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:03:47 crc kubenswrapper[4919]: I0930 21:03:47.632661 4919 scope.go:117] "RemoveContainer" containerID="600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249" Sep 30 21:03:47 crc kubenswrapper[4919]: E0930 21:03:47.633150 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" 
Sep 30 21:03:48 crc kubenswrapper[4919]: I0930 21:03:48.631718 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:03:48 crc kubenswrapper[4919]: E0930 21:03:48.632130 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:03:49 crc kubenswrapper[4919]: I0930 21:03:49.592840 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:03:49 crc kubenswrapper[4919]: I0930 21:03:49.593884 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:03:49 crc kubenswrapper[4919]: E0930 21:03:49.594117 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:03:55 crc kubenswrapper[4919]: I0930 21:03:55.640171 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:03:56 crc kubenswrapper[4919]: I0930 21:03:56.191162 4919 generic.go:334] "Generic (PLEG): container finished" podID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" exitCode=2 Sep 30 21:03:56 crc kubenswrapper[4919]: I0930 21:03:56.191337 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerDied","Data":"33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987"} Sep 30 21:03:56 crc kubenswrapper[4919]: I0930 21:03:56.191604 4919 scope.go:117] "RemoveContainer" containerID="882d5937ef4101c36b1bf9f9e96c4767b0aec861d956bbfa84b826e657f885d4" Sep 30 21:03:56 crc kubenswrapper[4919]: I0930 21:03:56.192464 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:03:56 crc kubenswrapper[4919]: E0930 21:03:56.192925 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:03:59 crc kubenswrapper[4919]: I0930 21:03:59.424327 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:03:59 crc kubenswrapper[4919]: I0930 21:03:59.425761 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:03:59 crc kubenswrapper[4919]: E0930 21:03:59.426077 4919 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:04:00 crc kubenswrapper[4919]: I0930 21:04:00.633234 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:04:00 crc kubenswrapper[4919]: E0930 21:04:00.633784 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:04:01 crc kubenswrapper[4919]: I0930 21:04:01.632644 4919 scope.go:117] "RemoveContainer" containerID="600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249" Sep 30 21:04:01 crc kubenswrapper[4919]: E0930 21:04:01.633455 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:04:04 crc kubenswrapper[4919]: I0930 21:04:04.632060 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:04:04 crc kubenswrapper[4919]: E0930 21:04:04.632777 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:04:07 crc kubenswrapper[4919]: I0930 21:04:07.315149 4919 generic.go:334] "Generic (PLEG): container finished" podID="b25ec85c-a544-4c2e-9e16-042c3632e174" containerID="bb6f4e140344374c5dec36c318fbfbe7fd58f4dd9c2623937391563f14bdc2dd" exitCode=0 Sep 30 21:04:07 crc kubenswrapper[4919]: I0930 21:04:07.315251 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/crc-debug-x28v7" event={"ID":"b25ec85c-a544-4c2e-9e16-042c3632e174","Type":"ContainerDied","Data":"bb6f4e140344374c5dec36c318fbfbe7fd58f4dd9c2623937391563f14bdc2dd"} Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.486014 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.524701 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25ld5/crc-debug-x28v7"] Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.533315 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25ld5/crc-debug-x28v7"] Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.591418 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b25ec85c-a544-4c2e-9e16-042c3632e174-host\") pod \"b25ec85c-a544-4c2e-9e16-042c3632e174\" (UID: \"b25ec85c-a544-4c2e-9e16-042c3632e174\") " Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.591519 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rktnz\" (UniqueName: \"kubernetes.io/projected/b25ec85c-a544-4c2e-9e16-042c3632e174-kube-api-access-rktnz\") pod \"b25ec85c-a544-4c2e-9e16-042c3632e174\" (UID: \"b25ec85c-a544-4c2e-9e16-042c3632e174\") " Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.591642 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b25ec85c-a544-4c2e-9e16-042c3632e174-host" (OuterVolumeSpecName: "host") pod "b25ec85c-a544-4c2e-9e16-042c3632e174" (UID: "b25ec85c-a544-4c2e-9e16-042c3632e174"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.592603 4919 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b25ec85c-a544-4c2e-9e16-042c3632e174-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.601643 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b25ec85c-a544-4c2e-9e16-042c3632e174-kube-api-access-rktnz" (OuterVolumeSpecName: "kube-api-access-rktnz") pod "b25ec85c-a544-4c2e-9e16-042c3632e174" (UID: "b25ec85c-a544-4c2e-9e16-042c3632e174"). InnerVolumeSpecName "kube-api-access-rktnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:08 crc kubenswrapper[4919]: I0930 21:04:08.695082 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rktnz\" (UniqueName: \"kubernetes.io/projected/b25ec85c-a544-4c2e-9e16-042c3632e174-kube-api-access-rktnz\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.356192 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20445499d822bc6f2f1b5bb08d801128de62a92f028436e5b5bf16a113beada5" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.356291 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-x28v7" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.424161 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.424906 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:04:09 crc kubenswrapper[4919]: E0930 21:04:09.425198 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.592357 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.592957 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:04:09 crc kubenswrapper[4919]: E0930 21:04:09.593161 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.647063 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b25ec85c-a544-4c2e-9e16-042c3632e174" path="/var/lib/kubelet/pods/b25ec85c-a544-4c2e-9e16-042c3632e174/volumes" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.760506 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25ld5/crc-debug-n5jkb"] Sep 30 21:04:09 crc kubenswrapper[4919]: E0930 21:04:09.761018 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b25ec85c-a544-4c2e-9e16-042c3632e174" containerName="container-00" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.761040 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="b25ec85c-a544-4c2e-9e16-042c3632e174" containerName="container-00" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.761351 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="b25ec85c-a544-4c2e-9e16-042c3632e174" containerName="container-00" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.762768 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.823303 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-host\") pod \"crc-debug-n5jkb\" (UID: \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\") " pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.823503 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck75h\" (UniqueName: \"kubernetes.io/projected/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-kube-api-access-ck75h\") pod \"crc-debug-n5jkb\" (UID: \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\") " pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.925309 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-host\") pod \"crc-debug-n5jkb\" (UID: \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\") " pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.925461 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-host\") pod \"crc-debug-n5jkb\" (UID: \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\") " pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.925511 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck75h\" (UniqueName: \"kubernetes.io/projected/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-kube-api-access-ck75h\") pod \"crc-debug-n5jkb\" (UID: \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\") " pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:09 crc kubenswrapper[4919]: I0930 21:04:09.957186 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck75h\" (UniqueName: \"kubernetes.io/projected/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-kube-api-access-ck75h\") pod \"crc-debug-n5jkb\" (UID: \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\") " pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:10 crc kubenswrapper[4919]: I0930 21:04:10.092233 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:10 crc kubenswrapper[4919]: I0930 21:04:10.368295 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/crc-debug-n5jkb" event={"ID":"6b1d4962-a4eb-4320-8fe2-230f56fa0dea","Type":"ContainerStarted","Data":"2d990c0443643428ab62fd05f532fec497e454f0988ca90a6ace2b9ed1bc5db9"} Sep 30 21:04:11 crc kubenswrapper[4919]: I0930 21:04:11.399762 4919 generic.go:334] "Generic (PLEG): container finished" podID="6b1d4962-a4eb-4320-8fe2-230f56fa0dea" containerID="3166941e0d234e73fa67de52fbf9d1baa43a74e841acff18eb607c7631b873bd" exitCode=0 Sep 30 21:04:11 crc kubenswrapper[4919]: I0930 21:04:11.399858 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/crc-debug-n5jkb" event={"ID":"6b1d4962-a4eb-4320-8fe2-230f56fa0dea","Type":"ContainerDied","Data":"3166941e0d234e73fa67de52fbf9d1baa43a74e841acff18eb607c7631b873bd"} Sep 30 21:04:12 crc kubenswrapper[4919]: I0930 21:04:12.530378 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:12 crc kubenswrapper[4919]: I0930 21:04:12.579747 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck75h\" (UniqueName: \"kubernetes.io/projected/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-kube-api-access-ck75h\") pod \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\" (UID: \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\") " Sep 30 21:04:12 crc kubenswrapper[4919]: I0930 21:04:12.579918 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-host\") pod \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\" (UID: \"6b1d4962-a4eb-4320-8fe2-230f56fa0dea\") " Sep 30 21:04:12 crc kubenswrapper[4919]: I0930 21:04:12.580166 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-host" (OuterVolumeSpecName: "host") pod "6b1d4962-a4eb-4320-8fe2-230f56fa0dea" (UID: "6b1d4962-a4eb-4320-8fe2-230f56fa0dea"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:04:12 crc kubenswrapper[4919]: I0930 21:04:12.580872 4919 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:12 crc kubenswrapper[4919]: I0930 21:04:12.588527 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-kube-api-access-ck75h" (OuterVolumeSpecName: "kube-api-access-ck75h") pod "6b1d4962-a4eb-4320-8fe2-230f56fa0dea" (UID: "6b1d4962-a4eb-4320-8fe2-230f56fa0dea"). InnerVolumeSpecName "kube-api-access-ck75h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:12 crc kubenswrapper[4919]: I0930 21:04:12.683373 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck75h\" (UniqueName: \"kubernetes.io/projected/6b1d4962-a4eb-4320-8fe2-230f56fa0dea-kube-api-access-ck75h\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:13 crc kubenswrapper[4919]: I0930 21:04:13.421694 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/crc-debug-n5jkb" event={"ID":"6b1d4962-a4eb-4320-8fe2-230f56fa0dea","Type":"ContainerDied","Data":"2d990c0443643428ab62fd05f532fec497e454f0988ca90a6ace2b9ed1bc5db9"} Sep 30 21:04:13 crc kubenswrapper[4919]: I0930 21:04:13.422020 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d990c0443643428ab62fd05f532fec497e454f0988ca90a6ace2b9ed1bc5db9" Sep 30 21:04:13 crc kubenswrapper[4919]: I0930 21:04:13.421781 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-n5jkb" Sep 30 21:04:14 crc kubenswrapper[4919]: I0930 21:04:14.633152 4919 scope.go:117] "RemoveContainer" containerID="600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249" Sep 30 21:04:15 crc kubenswrapper[4919]: I0930 21:04:15.443507 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897"} Sep 30 21:04:15 crc kubenswrapper[4919]: I0930 21:04:15.647562 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:04:15 crc kubenswrapper[4919]: E0930 21:04:15.648125 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:04:17 crc kubenswrapper[4919]: I0930 21:04:17.318486 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:04:17 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:04:17 crc kubenswrapper[4919]: > Sep 30 21:04:17 crc kubenswrapper[4919]: I0930 21:04:17.829471 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25ld5/crc-debug-n5jkb"] Sep 30 21:04:17 crc kubenswrapper[4919]: I0930 21:04:17.838708 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25ld5/crc-debug-n5jkb"] Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.052414 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-25ld5/crc-debug-lmlnz"] Sep 30 21:04:19 crc kubenswrapper[4919]: E0930 21:04:19.052845 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b1d4962-a4eb-4320-8fe2-230f56fa0dea" containerName="container-00" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.052858 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b1d4962-a4eb-4320-8fe2-230f56fa0dea" containerName="container-00" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 
21:04:19.053081 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b1d4962-a4eb-4320-8fe2-230f56fa0dea" containerName="container-00" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.053855 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.096813 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f352ff1f-8982-4146-a71a-66b7f558f16b-host\") pod \"crc-debug-lmlnz\" (UID: \"f352ff1f-8982-4146-a71a-66b7f558f16b\") " pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.096913 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwppb\" (UniqueName: \"kubernetes.io/projected/f352ff1f-8982-4146-a71a-66b7f558f16b-kube-api-access-lwppb\") pod \"crc-debug-lmlnz\" (UID: \"f352ff1f-8982-4146-a71a-66b7f558f16b\") " pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.198991 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f352ff1f-8982-4146-a71a-66b7f558f16b-host\") pod \"crc-debug-lmlnz\" (UID: \"f352ff1f-8982-4146-a71a-66b7f558f16b\") " pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.199095 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwppb\" (UniqueName: \"kubernetes.io/projected/f352ff1f-8982-4146-a71a-66b7f558f16b-kube-api-access-lwppb\") pod \"crc-debug-lmlnz\" (UID: \"f352ff1f-8982-4146-a71a-66b7f558f16b\") " pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.199109 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f352ff1f-8982-4146-a71a-66b7f558f16b-host\") pod \"crc-debug-lmlnz\" (UID: \"f352ff1f-8982-4146-a71a-66b7f558f16b\") " pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.219274 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwppb\" (UniqueName: \"kubernetes.io/projected/f352ff1f-8982-4146-a71a-66b7f558f16b-kube-api-access-lwppb\") pod \"crc-debug-lmlnz\" (UID: \"f352ff1f-8982-4146-a71a-66b7f558f16b\") " pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.377555 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.489302 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/crc-debug-lmlnz" event={"ID":"f352ff1f-8982-4146-a71a-66b7f558f16b","Type":"ContainerStarted","Data":"d844298098a6acf122e29261762fd5e7de31b982fcb0a83a4ed2a8d33caddcff"} Sep 30 21:04:19 crc kubenswrapper[4919]: I0930 21:04:19.649677 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b1d4962-a4eb-4320-8fe2-230f56fa0dea" path="/var/lib/kubelet/pods/6b1d4962-a4eb-4320-8fe2-230f56fa0dea/volumes" Sep 30 21:04:20 crc kubenswrapper[4919]: I0930 21:04:20.424030 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:04:20 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:04:20 crc kubenswrapper[4919]: > Sep 30 21:04:20 crc kubenswrapper[4919]: I0930 21:04:20.504637 4919 generic.go:334] "Generic (PLEG): container finished" podID="f352ff1f-8982-4146-a71a-66b7f558f16b" containerID="f9ba927c11103822d9ace501597ea019ff6a74986f67440d52ccf9875eb44d1d" exitCode=0 Sep 30 21:04:20 crc kubenswrapper[4919]: I0930 21:04:20.504705 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/crc-debug-lmlnz" event={"ID":"f352ff1f-8982-4146-a71a-66b7f558f16b","Type":"ContainerDied","Data":"f9ba927c11103822d9ace501597ea019ff6a74986f67440d52ccf9875eb44d1d"} Sep 30 21:04:20 crc kubenswrapper[4919]: I0930 21:04:20.565713 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25ld5/crc-debug-lmlnz"] Sep 30 21:04:20 crc kubenswrapper[4919]: I0930 21:04:20.573862 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25ld5/crc-debug-lmlnz"] Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.617303 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.632686 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.633087 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:04:21 crc kubenswrapper[4919]: E0930 21:04:21.633123 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:04:21 crc kubenswrapper[4919]: E0930 21:04:21.633370 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.646629 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwppb\" (UniqueName: \"kubernetes.io/projected/f352ff1f-8982-4146-a71a-66b7f558f16b-kube-api-access-lwppb\") pod \"f352ff1f-8982-4146-a71a-66b7f558f16b\" (UID: \"f352ff1f-8982-4146-a71a-66b7f558f16b\") " Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.646958 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f352ff1f-8982-4146-a71a-66b7f558f16b-host\") pod \"f352ff1f-8982-4146-a71a-66b7f558f16b\" (UID: \"f352ff1f-8982-4146-a71a-66b7f558f16b\") " Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.652427 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f352ff1f-8982-4146-a71a-66b7f558f16b-host" (OuterVolumeSpecName: "host") pod "f352ff1f-8982-4146-a71a-66b7f558f16b" (UID: "f352ff1f-8982-4146-a71a-66b7f558f16b"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.654564 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f352ff1f-8982-4146-a71a-66b7f558f16b-kube-api-access-lwppb" (OuterVolumeSpecName: "kube-api-access-lwppb") pod "f352ff1f-8982-4146-a71a-66b7f558f16b" (UID: "f352ff1f-8982-4146-a71a-66b7f558f16b"). InnerVolumeSpecName "kube-api-access-lwppb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.750414 4919 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f352ff1f-8982-4146-a71a-66b7f558f16b-host\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:21 crc kubenswrapper[4919]: I0930 21:04:21.750452 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwppb\" (UniqueName: \"kubernetes.io/projected/f352ff1f-8982-4146-a71a-66b7f558f16b-kube-api-access-lwppb\") on node \"crc\" DevicePath \"\"" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.242275 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd_78f527bb-835b-438b-a68f-bc7a4ffc921f/util/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.441713 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd_78f527bb-835b-438b-a68f-bc7a4ffc921f/util/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.462930 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd_78f527bb-835b-438b-a68f-bc7a4ffc921f/pull/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.474538 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd_78f527bb-835b-438b-a68f-bc7a4ffc921f/pull/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.523205 4919 scope.go:117] "RemoveContainer" containerID="f9ba927c11103822d9ace501597ea019ff6a74986f67440d52ccf9875eb44d1d" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.523325 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-25ld5/crc-debug-lmlnz" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.725300 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd_78f527bb-835b-438b-a68f-bc7a4ffc921f/pull/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.751481 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd_78f527bb-835b-438b-a68f-bc7a4ffc921f/util/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.757027 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_0f7365a3206c161a8d68ef378a1abad8677e7b04b793b4a52f76f1b660rmtxd_78f527bb-835b-438b-a68f-bc7a4ffc921f/extract/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.905421 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-nfpc8_fd8b2eba-9c90-4a16-b470-6e43eaa38f4d/kube-rbac-proxy/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.923346 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-7vfvh_35922b82-d9a9-425b-89e2-919fd9d937dd/kube-rbac-proxy/0.log" Sep 30 21:04:22 crc kubenswrapper[4919]: I0930 21:04:22.978984 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-nfpc8_fd8b2eba-9c90-4a16-b470-6e43eaa38f4d/manager/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.134646 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-7vfvh_35922b82-d9a9-425b-89e2-919fd9d937dd/manager/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.151360 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-mjllk_75189fe6-5b26-4743-b2e2-8e0fee41c653/kube-rbac-proxy/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.207478 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-mjllk_75189fe6-5b26-4743-b2e2-8e0fee41c653/manager/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.320136 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-k6295_1fbeecba-9bf8-44ef-819b-63bcf26ce691/kube-rbac-proxy/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.372621 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:04:23 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:04:23 crc kubenswrapper[4919]: > Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.372727 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-proc-0" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.373990 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cloudkitty-proc" containerStatusID={"Type":"cri-o","ID":"b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897"} pod="openstack/cloudkitty-proc-0" containerMessage="Container 
cloudkitty-proc failed liveness probe, will be restarted" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.374043 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" containerID="cri-o://b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" gracePeriod=30 Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.418136 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-k6295_1fbeecba-9bf8-44ef-819b-63bcf26ce691/manager/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.514011 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-5dtjx_7fc088d5-3fb5-40a2-b086-c1a4e52a325e/manager/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.532373 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-5dtjx_7fc088d5-3fb5-40a2-b086-c1a4e52a325e/kube-rbac-proxy/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.607367 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-w2t4d_94b17ff0-8f16-4683-8153-a0d8b2b55437/kube-rbac-proxy/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.651443 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f352ff1f-8982-4146-a71a-66b7f558f16b" path="/var/lib/kubelet/pods/f352ff1f-8982-4146-a71a-66b7f558f16b/volumes" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.672953 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-w2t4d_94b17ff0-8f16-4683-8153-a0d8b2b55437/manager/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: I0930 21:04:23.887744 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-v96qz_1053b07d-a2f6-4580-8edd-65e680622c9e/kube-rbac-proxy/0.log" Sep 30 21:04:23 crc kubenswrapper[4919]: E0930 21:04:23.906080 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.065185 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-9d6c5db85-v96qz_1053b07d-a2f6-4580-8edd-65e680622c9e/manager/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.080113 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-2w4vp_570ac8cc-5b75-4404-9df9-36387db5e5aa/kube-rbac-proxy/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.142530 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-2w4vp_570ac8cc-5b75-4404-9df9-36387db5e5aa/manager/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.280993 4919 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-p9rsr_469c99b8-4171-48c7-9091-fbab0c200c11/kube-rbac-proxy/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.339011 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-p9rsr_469c99b8-4171-48c7-9091-fbab0c200c11/manager/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.379004 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-2mrbb_8b7b2889-ed1b-45b0-909c-011b3fbee825/kube-rbac-proxy/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.461168 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-2mrbb_8b7b2889-ed1b-45b0-909c-011b3fbee825/manager/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.522622 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-d2k8w_19fb5b55-7b88-47ff-a4e5-b8995a29db8f/kube-rbac-proxy/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.552237 4919 generic.go:334] "Generic (PLEG): container finished" podID="983be098-678e-4ecb-a684-7874ae171f14" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" exitCode=0 Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.552283 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerDied","Data":"b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897"} Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.552329 4919 scope.go:117] "RemoveContainer" containerID="600dc83417fc6e0c81d87b5096b480e375eb99db17f205ac974475a2d3fec249" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.553149 4919 scope.go:117] "RemoveContainer" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" Sep 30 21:04:24 crc kubenswrapper[4919]: E0930 21:04:24.553592 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.613559 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-d2k8w_19fb5b55-7b88-47ff-a4e5-b8995a29db8f/manager/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.688107 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-57rlr_6ca3a550-cff6-49a7-ae12-43f75f743cb2/kube-rbac-proxy/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.795483 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-57rlr_6ca3a550-cff6-49a7-ae12-43f75f743cb2/manager/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: I0930 21:04:24.867790 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-5sgrl_bc2e190b-bcce-456a-938a-4a2cc054a43c/kube-rbac-proxy/0.log" Sep 30 21:04:24 crc kubenswrapper[4919]: 
I0930 21:04:24.955598 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-5sgrl_bc2e190b-bcce-456a-938a-4a2cc054a43c/manager/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.031635 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-56jpm_b1d4f4a6-d94c-4b73-8f95-9378547c5453/kube-rbac-proxy/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.060444 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-56jpm_b1d4f4a6-d94c-4b73-8f95-9378547c5453/manager/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.182879 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-qrxkz_25bcaa59-d154-41d1-8f73-92f41da4e3a9/kube-rbac-proxy/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.208520 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-qrxkz_25bcaa59-d154-41d1-8f73-92f41da4e3a9/manager/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.392720 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6688bc8b84-h7q66_0f662e98-a8ef-4ae4-8d9b-2853a779ecf6/kube-rbac-proxy/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.529272 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-59b4657894-mfdh8_e471c350-4736-4417-8d84-8643b8da1be2/kube-rbac-proxy/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.654829 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-6djkx_af135c02-a48d-4046-9412-120ee15f6ea3/registry-server/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.790370 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-59b4657894-mfdh8_e471c350-4736-4417-8d84-8643b8da1be2/operator/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.869899 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-zf7wn_e5b640a6-b206-4061-95f7-59c09848b709/kube-rbac-proxy/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.971973 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-k95j5_fc527cf7-785e-41fb-9162-fb0c93fc20ff/kube-rbac-proxy/0.log" Sep 30 21:04:25 crc kubenswrapper[4919]: I0930 21:04:25.978999 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-zf7wn_e5b640a6-b206-4061-95f7-59c09848b709/manager/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.146262 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-k95j5_fc527cf7-785e-41fb-9162-fb0c93fc20ff/manager/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.202258 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-d9lcd_8d89dcea-1720-4d39-8ea1-016d4c2ad572/operator/0.log" Sep 30 21:04:26 
crc kubenswrapper[4919]: I0930 21:04:26.414196 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-xlw57_f6ccf519-3c56-404b-a649-17f0cda5f592/kube-rbac-proxy/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.434336 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-xlw57_f6ccf519-3c56-404b-a649-17f0cda5f592/manager/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.450364 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-fb4cc5b89-ktj4d_803e4642-1c89-4c17-8d49-43496c3fade8/kube-rbac-proxy/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.573269 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6688bc8b84-h7q66_0f662e98-a8ef-4ae4-8d9b-2853a779ecf6/manager/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.684517 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-sqjrr_aa83041a-f63d-4879-8756-5a2929e81305/kube-rbac-proxy/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.812110 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-sqjrr_aa83041a-f63d-4879-8756-5a2929e81305/manager/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.838091 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-fb4cc5b89-ktj4d_803e4642-1c89-4c17-8d49-43496c3fade8/manager/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.907281 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-kdb2m_9ce83d6e-31dc-43d2-b413-055ee52b075d/kube-rbac-proxy/0.log" Sep 30 21:04:26 crc kubenswrapper[4919]: I0930 21:04:26.943952 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-kdb2m_9ce83d6e-31dc-43d2-b413-055ee52b075d/manager/0.log" Sep 30 21:04:27 crc kubenswrapper[4919]: I0930 21:04:27.632279 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:04:27 crc kubenswrapper[4919]: E0930 21:04:27.632606 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:04:32 crc kubenswrapper[4919]: I0930 21:04:32.632331 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:04:32 crc kubenswrapper[4919]: E0930 21:04:32.633029 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" 
podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:04:35 crc kubenswrapper[4919]: I0930 21:04:35.646455 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:04:35 crc kubenswrapper[4919]: E0930 21:04:35.647122 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:04:39 crc kubenswrapper[4919]: I0930 21:04:39.632180 4919 scope.go:117] "RemoveContainer" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" Sep 30 21:04:39 crc kubenswrapper[4919]: E0930 21:04:39.633082 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:04:40 crc kubenswrapper[4919]: I0930 21:04:40.632631 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:04:40 crc kubenswrapper[4919]: E0930 21:04:40.632950 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:04:43 crc kubenswrapper[4919]: I0930 21:04:43.585112 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-lfnjg_764e2fc0-f6af-45a8-8a90-f78ce95abf62/control-plane-machine-set-operator/0.log" Sep 30 21:04:43 crc kubenswrapper[4919]: I0930 21:04:43.707056 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fdmjq_f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56/kube-rbac-proxy/0.log" Sep 30 21:04:43 crc kubenswrapper[4919]: I0930 21:04:43.786942 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-fdmjq_f9efe68f-8e01-4fc8-ba67-e0a1e9f70f56/machine-api-operator/0.log" Sep 30 21:04:44 crc kubenswrapper[4919]: I0930 21:04:44.632607 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:04:44 crc kubenswrapper[4919]: E0930 21:04:44.632847 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:04:49 crc kubenswrapper[4919]: I0930 21:04:49.632065 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:04:49 crc 
kubenswrapper[4919]: E0930 21:04:49.632750 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:04:51 crc kubenswrapper[4919]: I0930 21:04:51.635941 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:04:51 crc kubenswrapper[4919]: E0930 21:04:51.636525 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:04:54 crc kubenswrapper[4919]: I0930 21:04:54.632348 4919 scope.go:117] "RemoveContainer" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" Sep 30 21:04:54 crc kubenswrapper[4919]: E0930 21:04:54.632956 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:04:55 crc kubenswrapper[4919]: I0930 21:04:55.639322 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:04:55 crc kubenswrapper[4919]: E0930 21:04:55.639799 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:04:56 crc kubenswrapper[4919]: I0930 21:04:56.185576 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-pdg2j_5a0a267a-c9dc-41f4-bbfe-9ca579ac8c74/cert-manager-controller/0.log" Sep 30 21:04:56 crc kubenswrapper[4919]: I0930 21:04:56.379899 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-gvrpb_5d98bfd5-9d79-4bc5-9525-b9dae37efe66/cert-manager-cainjector/0.log" Sep 30 21:04:56 crc kubenswrapper[4919]: I0930 21:04:56.444374 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-qqlgt_8273c2ae-c5d8-4e76-bc5b-ca2c4bce93dc/cert-manager-webhook/0.log" Sep 30 21:05:02 crc kubenswrapper[4919]: I0930 21:05:02.633134 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:05:02 crc kubenswrapper[4919]: I0930 21:05:02.634062 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:05:02 crc kubenswrapper[4919]: E0930 21:05:02.634421 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:05:02 crc kubenswrapper[4919]: E0930 21:05:02.634458 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:05:08 crc kubenswrapper[4919]: I0930 21:05:08.631963 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:05:08 crc kubenswrapper[4919]: E0930 21:05:08.632592 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:05:09 crc kubenswrapper[4919]: I0930 21:05:09.289703 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-4mdbj_097f3e59-c8c4-4f4e-9d97-e6d402584649/nmstate-console-plugin/0.log" Sep 30 21:05:09 crc kubenswrapper[4919]: I0930 21:05:09.457931 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-kdt4l_b6a02ba1-cd2d-408a-8037-2f277448c7cf/nmstate-handler/0.log" Sep 30 21:05:09 crc kubenswrapper[4919]: I0930 21:05:09.497062 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-jvwwd_503e0849-5f67-41ad-b1d8-3ebd8c23cc09/kube-rbac-proxy/0.log" Sep 30 21:05:09 crc kubenswrapper[4919]: I0930 21:05:09.545201 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-jvwwd_503e0849-5f67-41ad-b1d8-3ebd8c23cc09/nmstate-metrics/0.log" Sep 30 21:05:09 crc kubenswrapper[4919]: I0930 21:05:09.632964 4919 scope.go:117] "RemoveContainer" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" Sep 30 21:05:09 crc kubenswrapper[4919]: E0930 21:05:09.633251 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:05:09 crc kubenswrapper[4919]: I0930 21:05:09.717017 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-6l8tm_89dca143-9969-4919-9dc9-1eeb1d4614e9/nmstate-operator/0.log" Sep 30 21:05:09 crc kubenswrapper[4919]: I0930 21:05:09.772759 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-7kc5q_a3942c38-5d58-41de-9bdb-afd674081e1e/nmstate-webhook/0.log" Sep 30 21:05:14 crc kubenswrapper[4919]: I0930 21:05:14.632228 4919 scope.go:117] 
"RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:05:14 crc kubenswrapper[4919]: E0930 21:05:14.632944 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:05:17 crc kubenswrapper[4919]: I0930 21:05:17.632907 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:05:17 crc kubenswrapper[4919]: E0930 21:05:17.633498 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:05:21 crc kubenswrapper[4919]: I0930 21:05:21.638471 4919 scope.go:117] "RemoveContainer" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" Sep 30 21:05:21 crc kubenswrapper[4919]: E0930 21:05:21.639085 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:05:21 crc kubenswrapper[4919]: I0930 21:05:21.721943 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69dd967c6d-g6dkf_bed3f41f-9f7d-4838-a3a2-3ed58371a416/kube-rbac-proxy/0.log" Sep 30 21:05:21 crc kubenswrapper[4919]: I0930 21:05:21.770807 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69dd967c6d-g6dkf_bed3f41f-9f7d-4838-a3a2-3ed58371a416/manager/0.log" Sep 30 21:05:22 crc kubenswrapper[4919]: I0930 21:05:22.632781 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:05:22 crc kubenswrapper[4919]: E0930 21:05:22.633180 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:05:29 crc kubenswrapper[4919]: I0930 21:05:29.632740 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:05:29 crc kubenswrapper[4919]: E0930 21:05:29.633904 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" 
podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:05:30 crc kubenswrapper[4919]: I0930 21:05:30.632622 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:05:30 crc kubenswrapper[4919]: E0930 21:05:30.633246 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.024885 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-th6f6_5de2da45-8775-4784-9c80-810c6713751e/kube-rbac-proxy/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.137516 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-th6f6_5de2da45-8775-4784-9c80-810c6713751e/controller/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.228043 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-frr-files/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.399262 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-reloader/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.400012 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-metrics/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.405855 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-frr-files/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.445988 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-reloader/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.615628 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-frr-files/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.635080 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-reloader/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.639072 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-metrics/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.647093 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-metrics/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.788831 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-frr-files/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.823226 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-metrics/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 
21:05:34.824009 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/cp-reloader/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.833253 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/controller/0.log" Sep 30 21:05:34 crc kubenswrapper[4919]: I0930 21:05:34.979922 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/frr-metrics/0.log" Sep 30 21:05:35 crc kubenswrapper[4919]: I0930 21:05:35.036165 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/kube-rbac-proxy/0.log" Sep 30 21:05:35 crc kubenswrapper[4919]: I0930 21:05:35.036272 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/kube-rbac-proxy-frr/0.log" Sep 30 21:05:35 crc kubenswrapper[4919]: I0930 21:05:35.225930 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-dgcsl_07ab26d9-dc69-47bf-9d23-b0f94cf42749/frr-k8s-webhook-server/0.log" Sep 30 21:05:35 crc kubenswrapper[4919]: I0930 21:05:35.253785 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/reloader/0.log" Sep 30 21:05:35 crc kubenswrapper[4919]: I0930 21:05:35.443402 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-774854f49b-zvlnh_b90ce133-8951-4dee-92bd-f672580fb818/manager/0.log" Sep 30 21:05:35 crc kubenswrapper[4919]: I0930 21:05:35.608230 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-77fbfdddcb-cphcx_24a33fda-3d02-475b-96c4-4eef5f0a1dcf/webhook-server/0.log" Sep 30 21:05:35 crc kubenswrapper[4919]: I0930 21:05:35.639353 4919 scope.go:117] "RemoveContainer" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" Sep 30 21:05:35 crc kubenswrapper[4919]: E0930 21:05:35.639693 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 1m20s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:05:35 crc kubenswrapper[4919]: I0930 21:05:35.752835 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-zqj5g_2b1754b1-e5ff-4053-bd08-09773a42d4eb/kube-rbac-proxy/0.log" Sep 30 21:05:36 crc kubenswrapper[4919]: I0930 21:05:36.345240 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-zqj5g_2b1754b1-e5ff-4053-bd08-09773a42d4eb/speaker/0.log" Sep 30 21:05:36 crc kubenswrapper[4919]: I0930 21:05:36.499670 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-j8cms_7fdc3545-02de-4073-b40a-249a1a858d3c/frr/0.log" Sep 30 21:05:37 crc kubenswrapper[4919]: I0930 21:05:37.632508 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:05:37 crc kubenswrapper[4919]: E0930 21:05:37.633034 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with 
CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:05:41 crc kubenswrapper[4919]: I0930 21:05:41.632833 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:05:41 crc kubenswrapper[4919]: E0930 21:05:41.633589 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:05:42 crc kubenswrapper[4919]: I0930 21:05:42.631968 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:05:42 crc kubenswrapper[4919]: E0930 21:05:42.632346 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:05:46 crc kubenswrapper[4919]: I0930 21:05:46.633088 4919 scope.go:117] "RemoveContainer" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" Sep 30 21:05:47 crc kubenswrapper[4919]: I0930 21:05:47.498816 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021"} Sep 30 21:05:48 crc kubenswrapper[4919]: I0930 21:05:48.851640 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg_1c228af8-0449-4b6c-95e3-ef80d378fbdd/util/0.log" Sep 30 21:05:49 crc kubenswrapper[4919]: I0930 21:05:49.221686 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg_1c228af8-0449-4b6c-95e3-ef80d378fbdd/util/0.log" Sep 30 21:05:49 crc kubenswrapper[4919]: I0930 21:05:49.250614 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg_1c228af8-0449-4b6c-95e3-ef80d378fbdd/pull/0.log" Sep 30 21:05:49 crc kubenswrapper[4919]: I0930 21:05:49.281670 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg_1c228af8-0449-4b6c-95e3-ef80d378fbdd/pull/0.log" Sep 30 21:05:49 crc kubenswrapper[4919]: I0930 21:05:49.551837 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg_1c228af8-0449-4b6c-95e3-ef80d378fbdd/pull/0.log" Sep 30 21:05:49 crc kubenswrapper[4919]: I0930 21:05:49.563963 4919 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg_1c228af8-0449-4b6c-95e3-ef80d378fbdd/extract/0.log" Sep 30 21:05:49 crc kubenswrapper[4919]: I0930 21:05:49.607936 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694467dg_1c228af8-0449-4b6c-95e3-ef80d378fbdd/util/0.log" Sep 30 21:05:49 crc kubenswrapper[4919]: I0930 21:05:49.632815 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:05:49 crc kubenswrapper[4919]: E0930 21:05:49.633101 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:05:49 crc kubenswrapper[4919]: I0930 21:05:49.764977 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5_1c0928aa-abb7-42d8-888e-6990cd01c99a/util/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.024751 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5_1c0928aa-abb7-42d8-888e-6990cd01c99a/pull/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.049557 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5_1c0928aa-abb7-42d8-888e-6990cd01c99a/util/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.058301 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5_1c0928aa-abb7-42d8-888e-6990cd01c99a/pull/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.207983 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5_1c0928aa-abb7-42d8-888e-6990cd01c99a/pull/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.211509 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5_1c0928aa-abb7-42d8-888e-6990cd01c99a/util/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.299449 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcppnx5_1c0928aa-abb7-42d8-888e-6990cd01c99a/extract/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.339275 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:05:50 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:05:50 crc kubenswrapper[4919]: > Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.396875 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb_77efc011-2683-4ff8-80f9-be0b81c8c7f4/util/0.log" Sep 30 21:05:50 crc 
kubenswrapper[4919]: I0930 21:05:50.574206 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb_77efc011-2683-4ff8-80f9-be0b81c8c7f4/util/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.607757 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb_77efc011-2683-4ff8-80f9-be0b81c8c7f4/pull/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.628698 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb_77efc011-2683-4ff8-80f9-be0b81c8c7f4/pull/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.772472 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb_77efc011-2683-4ff8-80f9-be0b81c8c7f4/util/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.785577 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb_77efc011-2683-4ff8-80f9-be0b81c8c7f4/extract/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.800234 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_a6d815214afcb93f379916e45350d3de39072121f31a1d7eaaf6e22c2d88dhb_77efc011-2683-4ff8-80f9-be0b81c8c7f4/pull/0.log" Sep 30 21:05:50 crc kubenswrapper[4919]: I0930 21:05:50.966147 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_9a753e04-c280-40ea-bce3-2803f7a30e1d/util/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.134415 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_9a753e04-c280-40ea-bce3-2803f7a30e1d/pull/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.150763 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_9a753e04-c280-40ea-bce3-2803f7a30e1d/util/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.183175 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_9a753e04-c280-40ea-bce3-2803f7a30e1d/pull/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.387114 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_9a753e04-c280-40ea-bce3-2803f7a30e1d/pull/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.407723 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_9a753e04-c280-40ea-bce3-2803f7a30e1d/extract/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.441631 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_c03590272772b1d93899b6ceaa83703cf46dc8f83faf0e965a036060c02mrbj_9a753e04-c280-40ea-bce3-2803f7a30e1d/util/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.595424 4919 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-4gtrb_1dcc4f23-6453-44f6-943a-0a79f2f6e224/extract-utilities/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.764315 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4gtrb_1dcc4f23-6453-44f6-943a-0a79f2f6e224/extract-content/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.779897 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4gtrb_1dcc4f23-6453-44f6-943a-0a79f2f6e224/extract-utilities/0.log" Sep 30 21:05:51 crc kubenswrapper[4919]: I0930 21:05:51.781647 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4gtrb_1dcc4f23-6453-44f6-943a-0a79f2f6e224/extract-content/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.036137 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4gtrb_1dcc4f23-6453-44f6-943a-0a79f2f6e224/extract-utilities/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.040849 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4gtrb_1dcc4f23-6453-44f6-943a-0a79f2f6e224/extract-content/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.204036 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-76q7q_64dc7de7-32f2-49ae-9719-c347dd0f340a/extract-utilities/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.350513 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-76q7q_64dc7de7-32f2-49ae-9719-c347dd0f340a/extract-content/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.398535 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-76q7q_64dc7de7-32f2-49ae-9719-c347dd0f340a/extract-content/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.412288 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-76q7q_64dc7de7-32f2-49ae-9719-c347dd0f340a/extract-utilities/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.423015 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4gtrb_1dcc4f23-6453-44f6-943a-0a79f2f6e224/registry-server/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.832380 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-76q7q_64dc7de7-32f2-49ae-9719-c347dd0f340a/extract-utilities/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.839133 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-76q7q_64dc7de7-32f2-49ae-9719-c347dd0f340a/extract-content/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.917395 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_f9cc61b3-cd89-4636-a3dd-60788041f808/util/0.log" Sep 30 21:05:52 crc kubenswrapper[4919]: I0930 21:05:52.999020 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_f9cc61b3-cd89-4636-a3dd-60788041f808/pull/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.094657 4919 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_f9cc61b3-cd89-4636-a3dd-60788041f808/util/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.143846 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_f9cc61b3-cd89-4636-a3dd-60788041f808/pull/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.280067 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_f9cc61b3-cd89-4636-a3dd-60788041f808/extract/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.343571 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:05:53 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:05:53 crc kubenswrapper[4919]: > Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.386472 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-76q7q_64dc7de7-32f2-49ae-9719-c347dd0f340a/registry-server/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.392082 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_f9cc61b3-cd89-4636-a3dd-60788041f808/pull/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.407029 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8d7c1038c65d2785a47a2ffcc15b07abd45421e7db92f3c296d966170bx872_f9cc61b3-cd89-4636-a3dd-60788041f808/util/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.504828 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr_ef6fff6c-fe79-4db3-a127-8d9938489f52/util/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.658141 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr_ef6fff6c-fe79-4db3-a127-8d9938489f52/pull/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.677452 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr_ef6fff6c-fe79-4db3-a127-8d9938489f52/pull/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.691297 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr_ef6fff6c-fe79-4db3-a127-8d9938489f52/util/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.906208 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr_ef6fff6c-fe79-4db3-a127-8d9938489f52/pull/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.926104 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr_ef6fff6c-fe79-4db3-a127-8d9938489f52/util/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.950201 4919 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d967stmr_ef6fff6c-fe79-4db3-a127-8d9938489f52/extract/0.log" Sep 30 21:05:53 crc kubenswrapper[4919]: I0930 21:05:53.968519 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-v29xb_4f439d99-db12-43ef-bf75-48e46588d67b/marketplace-operator/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.137963 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pbrx9_2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50/extract-utilities/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.296953 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pbrx9_2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50/extract-utilities/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.351111 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pbrx9_2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50/extract-content/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.362637 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pbrx9_2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50/extract-content/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.480936 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pbrx9_2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50/extract-content/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.498309 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pbrx9_2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50/extract-utilities/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.616590 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pbrx9_2ce51e11-6e4a-4ffa-bf10-f8b8b50fda50/registry-server/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.624520 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f2vvm_f8b254d9-2040-4662-8949-eeeec8786ac3/extract-utilities/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.762015 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f2vvm_f8b254d9-2040-4662-8949-eeeec8786ac3/extract-content/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.775542 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f2vvm_f8b254d9-2040-4662-8949-eeeec8786ac3/extract-utilities/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.794111 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f2vvm_f8b254d9-2040-4662-8949-eeeec8786ac3/extract-content/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.967896 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f2vvm_f8b254d9-2040-4662-8949-eeeec8786ac3/extract-utilities/0.log" Sep 30 21:05:54 crc kubenswrapper[4919]: I0930 21:05:54.981769 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-f2vvm_f8b254d9-2040-4662-8949-eeeec8786ac3/extract-content/0.log" Sep 30 21:05:55 crc kubenswrapper[4919]: I0930 21:05:55.394323 4919 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_redhat-operators-f2vvm_f8b254d9-2040-4662-8949-eeeec8786ac3/registry-server/0.log" Sep 30 21:05:56 crc kubenswrapper[4919]: I0930 21:05:56.332647 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:05:56 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:05:56 crc kubenswrapper[4919]: > Sep 30 21:05:56 crc kubenswrapper[4919]: I0930 21:05:56.332725 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-proc-0" Sep 30 21:05:56 crc kubenswrapper[4919]: I0930 21:05:56.333232 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cloudkitty-proc" containerStatusID={"Type":"cri-o","ID":"f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021"} pod="openstack/cloudkitty-proc-0" containerMessage="Container cloudkitty-proc failed liveness probe, will be restarted" Sep 30 21:05:56 crc kubenswrapper[4919]: I0930 21:05:56.333265 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" containerID="cri-o://f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" gracePeriod=30 Sep 30 21:05:56 crc kubenswrapper[4919]: I0930 21:05:56.682269 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:05:56 crc kubenswrapper[4919]: I0930 21:05:56.682685 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:05:56 crc kubenswrapper[4919]: E0930 21:05:56.682866 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:05:56 crc kubenswrapper[4919]: E0930 21:05:56.683197 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:05:56 crc kubenswrapper[4919]: E0930 21:05:56.861207 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:05:57 crc kubenswrapper[4919]: I0930 21:05:57.599757 4919 generic.go:334] "Generic (PLEG): container finished" podID="983be098-678e-4ecb-a684-7874ae171f14" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" exitCode=0 Sep 30 21:05:57 crc kubenswrapper[4919]: I0930 21:05:57.599802 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" 
event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerDied","Data":"f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021"} Sep 30 21:05:57 crc kubenswrapper[4919]: I0930 21:05:57.599839 4919 scope.go:117] "RemoveContainer" containerID="b7460da01bd5daeb8155b69ab25e566f435010e559e757dc9f441429be581897" Sep 30 21:05:57 crc kubenswrapper[4919]: I0930 21:05:57.600713 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:05:57 crc kubenswrapper[4919]: E0930 21:05:57.600989 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:06:02 crc kubenswrapper[4919]: I0930 21:06:02.632934 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:06:02 crc kubenswrapper[4919]: E0930 21:06:02.633660 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:06:08 crc kubenswrapper[4919]: I0930 21:06:08.004251 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-7c8cf85677-qbk5f_4018daf1-fc20-4051-86f6-515140b17020/prometheus-operator/0.log" Sep 30 21:06:08 crc kubenswrapper[4919]: I0930 21:06:08.424853 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-75c47dfbd8-522kz_b530a7bf-2e7a-4396-9b34-38ae127ca22e/prometheus-operator-admission-webhook/0.log" Sep 30 21:06:08 crc kubenswrapper[4919]: I0930 21:06:08.460796 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-75c47dfbd8-bmtwx_4e6b1904-0dbb-41d6-8345-5e71f57442e2/prometheus-operator-admission-webhook/0.log" Sep 30 21:06:08 crc kubenswrapper[4919]: I0930 21:06:08.632847 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:06:08 crc kubenswrapper[4919]: E0930 21:06:08.633160 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:06:08 crc kubenswrapper[4919]: I0930 21:06:08.691284 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-54bc95c9fb-222zn_1701a4b5-dbbc-41a2-96ae-cc483f69e8b9/perses-operator/0.log" Sep 30 21:06:08 crc kubenswrapper[4919]: I0930 21:06:08.697661 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-cc5f78dfc-sbrnf_ada97c0d-8672-4535-a82d-aeb57a2b192d/operator/0.log" Sep 30 21:06:09 crc kubenswrapper[4919]: I0930 21:06:09.632183 4919 scope.go:117] "RemoveContainer" 
containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:06:09 crc kubenswrapper[4919]: E0930 21:06:09.632622 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:06:10 crc kubenswrapper[4919]: I0930 21:06:10.632581 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:06:10 crc kubenswrapper[4919]: E0930 21:06:10.632947 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:06:13 crc kubenswrapper[4919]: I0930 21:06:13.633442 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:06:13 crc kubenswrapper[4919]: E0930 21:06:13.634284 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:06:19 crc kubenswrapper[4919]: I0930 21:06:19.632203 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:06:19 crc kubenswrapper[4919]: E0930 21:06:19.632821 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:06:20 crc kubenswrapper[4919]: I0930 21:06:20.746568 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69dd967c6d-g6dkf_bed3f41f-9f7d-4838-a3a2-3ed58371a416/manager/0.log" Sep 30 21:06:20 crc kubenswrapper[4919]: I0930 21:06:20.754097 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69dd967c6d-g6dkf_bed3f41f-9f7d-4838-a3a2-3ed58371a416/kube-rbac-proxy/0.log" Sep 30 21:06:22 crc kubenswrapper[4919]: I0930 21:06:22.631763 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:06:22 crc kubenswrapper[4919]: I0930 21:06:22.632153 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:06:22 crc kubenswrapper[4919]: E0930 21:06:22.632159 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway 
pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:06:22 crc kubenswrapper[4919]: E0930 21:06:22.632563 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:06:28 crc kubenswrapper[4919]: I0930 21:06:28.631800 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:06:29 crc kubenswrapper[4919]: I0930 21:06:29.931061 4919 generic.go:334] "Generic (PLEG): container finished" podID="0dde18df-d1bd-4b36-82af-cd0967cd942b" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" exitCode=2 Sep 30 21:06:29 crc kubenswrapper[4919]: I0930 21:06:29.931154 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" event={"ID":"0dde18df-d1bd-4b36-82af-cd0967cd942b","Type":"ContainerDied","Data":"14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae"} Sep 30 21:06:29 crc kubenswrapper[4919]: I0930 21:06:29.931690 4919 scope.go:117] "RemoveContainer" containerID="39d17ef091d5c2632da998281b7a272ee49210b50cfca3a60ffe85316495a587" Sep 30 21:06:29 crc kubenswrapper[4919]: I0930 21:06:29.932459 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:06:29 crc kubenswrapper[4919]: E0930 21:06:29.932751 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:06:32 crc kubenswrapper[4919]: I0930 21:06:32.633024 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:06:32 crc kubenswrapper[4919]: E0930 21:06:32.633827 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:06:33 crc kubenswrapper[4919]: I0930 21:06:33.632006 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:06:33 crc kubenswrapper[4919]: E0930 21:06:33.632343 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:06:34 crc kubenswrapper[4919]: I0930 
21:06:34.592847 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:06:34 crc kubenswrapper[4919]: I0930 21:06:34.593987 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:06:34 crc kubenswrapper[4919]: E0930 21:06:34.594306 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:06:36 crc kubenswrapper[4919]: I0930 21:06:36.633506 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:06:36 crc kubenswrapper[4919]: E0930 21:06:36.634156 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:06:39 crc kubenswrapper[4919]: I0930 21:06:39.592200 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" Sep 30 21:06:39 crc kubenswrapper[4919]: I0930 21:06:39.593360 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:06:39 crc kubenswrapper[4919]: E0930 21:06:39.593638 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:06:44 crc kubenswrapper[4919]: I0930 21:06:44.633095 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:06:44 crc kubenswrapper[4919]: I0930 21:06:44.634226 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:06:44 crc kubenswrapper[4919]: E0930 21:06:44.634639 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:06:45 crc kubenswrapper[4919]: I0930 21:06:45.098929 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerStarted","Data":"cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac"} Sep 30 21:06:45 crc kubenswrapper[4919]: I0930 21:06:45.099983 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:06:45 crc kubenswrapper[4919]: I0930 21:06:45.102475 4919 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerName="gateway" probeResult="failure" output="Get \"https://10.217.0.251:8081/ready\": dial tcp 10.217.0.251:8081: connect: connection refused" Sep 30 21:06:46 crc kubenswrapper[4919]: I0930 21:06:46.117480 4919 generic.go:334] "Generic (PLEG): container finished" podID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" exitCode=2 Sep 30 21:06:46 crc kubenswrapper[4919]: I0930 21:06:46.117543 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" event={"ID":"53a8fa4c-0ba7-4e41-86e5-e4e767126bc3","Type":"ContainerDied","Data":"cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac"} Sep 30 21:06:46 crc kubenswrapper[4919]: I0930 21:06:46.117607 4919 scope.go:117] "RemoveContainer" containerID="33957de9c7c9c5d63f5668550f0ec07ac162860e1c39e1b5893e32f8467b7987" Sep 30 21:06:46 crc kubenswrapper[4919]: I0930 21:06:46.118347 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:06:46 crc kubenswrapper[4919]: E0930 21:06:46.118672 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:06:47 crc kubenswrapper[4919]: I0930 21:06:47.136386 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:06:47 crc kubenswrapper[4919]: E0930 21:06:47.137191 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:06:51 crc kubenswrapper[4919]: I0930 21:06:51.634035 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:06:51 crc kubenswrapper[4919]: E0930 21:06:51.634970 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:06:52 crc kubenswrapper[4919]: I0930 21:06:52.633685 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:06:52 crc kubenswrapper[4919]: E0930 21:06:52.634416 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:06:58 crc kubenswrapper[4919]: I0930 21:06:58.632087 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:06:58 crc kubenswrapper[4919]: E0930 21:06:58.632798 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:07:01 crc kubenswrapper[4919]: I0930 21:07:01.633631 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:07:01 crc kubenswrapper[4919]: E0930 21:07:01.634374 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:07:04 crc kubenswrapper[4919]: I0930 21:07:04.633196 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:07:04 crc kubenswrapper[4919]: E0930 21:07:04.634017 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:07:05 crc kubenswrapper[4919]: I0930 21:07:05.632742 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:07:05 crc kubenswrapper[4919]: E0930 21:07:05.633067 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:07:09 crc kubenswrapper[4919]: I0930 21:07:09.424002 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" Sep 30 21:07:09 crc kubenswrapper[4919]: I0930 21:07:09.424884 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:07:09 crc kubenswrapper[4919]: E0930 21:07:09.425158 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" 
podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:07:12 crc kubenswrapper[4919]: I0930 21:07:12.635435 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:07:12 crc kubenswrapper[4919]: E0930 21:07:12.636398 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:07:19 crc kubenswrapper[4919]: I0930 21:07:19.633979 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:07:19 crc kubenswrapper[4919]: I0930 21:07:19.634808 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:07:19 crc kubenswrapper[4919]: E0930 21:07:19.635090 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-p4zv6_openshift-machine-config-operator(eb371a63-6d82-453e-930e-656710b97f10)\"" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" Sep 30 21:07:19 crc kubenswrapper[4919]: E0930 21:07:19.635290 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:07:24 crc kubenswrapper[4919]: I0930 21:07:24.633815 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:07:24 crc kubenswrapper[4919]: E0930 21:07:24.635580 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:07:27 crc kubenswrapper[4919]: I0930 21:07:27.632538 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:07:27 crc kubenswrapper[4919]: E0930 21:07:27.633481 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:07:32 crc kubenswrapper[4919]: I0930 21:07:32.632365 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:07:32 crc kubenswrapper[4919]: I0930 21:07:32.632942 4919 scope.go:117] "RemoveContainer" containerID="becf73c9eafd944b7eb8838eb8260e70cfad9d3c9ff9958e6e5a3a531981bdb5" Sep 30 21:07:32 crc 
kubenswrapper[4919]: E0930 21:07:32.633058 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:07:33 crc kubenswrapper[4919]: I0930 21:07:33.727689 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" event={"ID":"eb371a63-6d82-453e-930e-656710b97f10","Type":"ContainerStarted","Data":"046255c9fa57628275eff072ad3d45992d9d27b7ee714aefd0bd6e241d1162c1"} Sep 30 21:07:37 crc kubenswrapper[4919]: I0930 21:07:37.631856 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:07:37 crc kubenswrapper[4919]: E0930 21:07:37.632525 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:07:38 crc kubenswrapper[4919]: I0930 21:07:38.634203 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:07:38 crc kubenswrapper[4919]: E0930 21:07:38.635073 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:07:41 crc kubenswrapper[4919]: I0930 21:07:41.616491 4919 scope.go:117] "RemoveContainer" containerID="09af640df83d0ef1807067dc43c011a841ea3c7acf0e768f58ab60c4c08fc840" Sep 30 21:07:41 crc kubenswrapper[4919]: I0930 21:07:41.642534 4919 scope.go:117] "RemoveContainer" containerID="daeecf66698be80fe3c7574c410c95c80277fe3e8c672e767f6cfd54c4122920" Sep 30 21:07:41 crc kubenswrapper[4919]: I0930 21:07:41.672022 4919 scope.go:117] "RemoveContainer" containerID="eec54927e84a35714bf5b672947e80c2c5b4256f5745ec9e62afd0edb8252af7" Sep 30 21:07:41 crc kubenswrapper[4919]: I0930 21:07:41.692515 4919 scope.go:117] "RemoveContainer" containerID="6e4704d4dc34d5bad4423ee87075b553bebf0a480670ced4b26e894d4cfc2781" Sep 30 21:07:46 crc kubenswrapper[4919]: I0930 21:07:46.634255 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:07:46 crc kubenswrapper[4919]: E0930 21:07:46.636711 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:07:51 crc kubenswrapper[4919]: I0930 21:07:51.632784 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:07:51 crc 
kubenswrapper[4919]: E0930 21:07:51.633776 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:07:52 crc kubenswrapper[4919]: I0930 21:07:52.633053 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:07:52 crc kubenswrapper[4919]: E0930 21:07:52.633817 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:08:01 crc kubenswrapper[4919]: I0930 21:08:01.632176 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:08:01 crc kubenswrapper[4919]: E0930 21:08:01.633039 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:08:06 crc kubenswrapper[4919]: I0930 21:08:06.633612 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:08:06 crc kubenswrapper[4919]: E0930 21:08:06.634732 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:08:07 crc kubenswrapper[4919]: I0930 21:08:07.632934 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:08:07 crc kubenswrapper[4919]: E0930 21:08:07.633320 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:08:08 crc kubenswrapper[4919]: I0930 21:08:08.956195 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gtblb"] Sep 30 21:08:08 crc kubenswrapper[4919]: E0930 21:08:08.957786 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f352ff1f-8982-4146-a71a-66b7f558f16b" containerName="container-00" Sep 30 21:08:08 crc kubenswrapper[4919]: I0930 21:08:08.957813 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="f352ff1f-8982-4146-a71a-66b7f558f16b" containerName="container-00" Sep 30 21:08:08 crc kubenswrapper[4919]: I0930 21:08:08.958452 4919 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="f352ff1f-8982-4146-a71a-66b7f558f16b" containerName="container-00" Sep 30 21:08:08 crc kubenswrapper[4919]: I0930 21:08:08.964069 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:08 crc kubenswrapper[4919]: I0930 21:08:08.977245 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtblb"] Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.097646 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-catalog-content\") pod \"community-operators-gtblb\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.097758 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-utilities\") pod \"community-operators-gtblb\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.097804 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4bn8\" (UniqueName: \"kubernetes.io/projected/b055ec16-dd43-4627-96e1-13f04f5bcd88-kube-api-access-h4bn8\") pod \"community-operators-gtblb\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.141989 4919 generic.go:334] "Generic (PLEG): container finished" podID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerID="2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e" exitCode=0 Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.142052 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-25ld5/must-gather-cg45r" event={"ID":"637f326d-ce6d-43e0-a286-3619fa7bda84","Type":"ContainerDied","Data":"2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e"} Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.142866 4919 scope.go:117] "RemoveContainer" containerID="2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.200488 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-utilities\") pod \"community-operators-gtblb\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.201318 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-utilities\") pod \"community-operators-gtblb\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.201875 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4bn8\" (UniqueName: \"kubernetes.io/projected/b055ec16-dd43-4627-96e1-13f04f5bcd88-kube-api-access-h4bn8\") pod \"community-operators-gtblb\" (UID: 
\"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.202308 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-catalog-content\") pod \"community-operators-gtblb\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.202879 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-catalog-content\") pod \"community-operators-gtblb\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.228808 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4bn8\" (UniqueName: \"kubernetes.io/projected/b055ec16-dd43-4627-96e1-13f04f5bcd88-kube-api-access-h4bn8\") pod \"community-operators-gtblb\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.296142 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:09 crc kubenswrapper[4919]: W0930 21:08:09.868523 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb055ec16_dd43_4627_96e1_13f04f5bcd88.slice/crio-388c3e5fa66a9ba6f56c56ec04d9f125ec2a836b1fdb9831ac2574de0b60cdde WatchSource:0}: Error finding container 388c3e5fa66a9ba6f56c56ec04d9f125ec2a836b1fdb9831ac2574de0b60cdde: Status 404 returned error can't find the container with id 388c3e5fa66a9ba6f56c56ec04d9f125ec2a836b1fdb9831ac2574de0b60cdde Sep 30 21:08:09 crc kubenswrapper[4919]: I0930 21:08:09.872955 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtblb"] Sep 30 21:08:10 crc kubenswrapper[4919]: I0930 21:08:10.006144 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25ld5_must-gather-cg45r_637f326d-ce6d-43e0-a286-3619fa7bda84/gather/0.log" Sep 30 21:08:10 crc kubenswrapper[4919]: I0930 21:08:10.169648 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtblb" event={"ID":"b055ec16-dd43-4627-96e1-13f04f5bcd88","Type":"ContainerStarted","Data":"388c3e5fa66a9ba6f56c56ec04d9f125ec2a836b1fdb9831ac2574de0b60cdde"} Sep 30 21:08:11 crc kubenswrapper[4919]: I0930 21:08:11.181647 4919 generic.go:334] "Generic (PLEG): container finished" podID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerID="32d2dba5b0ebaee584f7e9ae52b0a360cd81720a151bc0290161715e50c334f5" exitCode=0 Sep 30 21:08:11 crc kubenswrapper[4919]: I0930 21:08:11.181864 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtblb" event={"ID":"b055ec16-dd43-4627-96e1-13f04f5bcd88","Type":"ContainerDied","Data":"32d2dba5b0ebaee584f7e9ae52b0a360cd81720a151bc0290161715e50c334f5"} Sep 30 21:08:11 crc kubenswrapper[4919]: I0930 21:08:11.184786 4919 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 30 21:08:12 crc kubenswrapper[4919]: I0930 21:08:12.205151 4919 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtblb" event={"ID":"b055ec16-dd43-4627-96e1-13f04f5bcd88","Type":"ContainerStarted","Data":"886b47a09a11e8ac2ed247c282c09adf756fad450a2937161e387a1d10c21b06"} Sep 30 21:08:12 crc kubenswrapper[4919]: I0930 21:08:12.632028 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:08:12 crc kubenswrapper[4919]: E0930 21:08:12.632461 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:08:14 crc kubenswrapper[4919]: I0930 21:08:14.226711 4919 generic.go:334] "Generic (PLEG): container finished" podID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerID="886b47a09a11e8ac2ed247c282c09adf756fad450a2937161e387a1d10c21b06" exitCode=0 Sep 30 21:08:14 crc kubenswrapper[4919]: I0930 21:08:14.227180 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtblb" event={"ID":"b055ec16-dd43-4627-96e1-13f04f5bcd88","Type":"ContainerDied","Data":"886b47a09a11e8ac2ed247c282c09adf756fad450a2937161e387a1d10c21b06"} Sep 30 21:08:16 crc kubenswrapper[4919]: I0930 21:08:16.254299 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtblb" event={"ID":"b055ec16-dd43-4627-96e1-13f04f5bcd88","Type":"ContainerStarted","Data":"34b842d24710e74fce580177e006398929917a464ab4a9de3c6a5961ec0c88db"} Sep 30 21:08:16 crc kubenswrapper[4919]: I0930 21:08:16.287678 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gtblb" podStartSLOduration=4.387948723 podStartE2EDuration="8.287639256s" podCreationTimestamp="2025-09-30 21:08:08 +0000 UTC" firstStartedPulling="2025-09-30 21:08:11.184560103 +0000 UTC m=+3276.300593230" lastFinishedPulling="2025-09-30 21:08:15.084250635 +0000 UTC m=+3280.200283763" observedRunningTime="2025-09-30 21:08:16.273822466 +0000 UTC m=+3281.389855613" watchObservedRunningTime="2025-09-30 21:08:16.287639256 +0000 UTC m=+3281.403672423" Sep 30 21:08:17 crc kubenswrapper[4919]: I0930 21:08:17.632154 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:08:17 crc kubenswrapper[4919]: E0930 21:08:17.632738 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:08:17 crc kubenswrapper[4919]: I0930 21:08:17.793650 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-25ld5/must-gather-cg45r"] Sep 30 21:08:17 crc kubenswrapper[4919]: I0930 21:08:17.793972 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-25ld5/must-gather-cg45r" podUID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerName="copy" 
containerID="cri-o://7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192" gracePeriod=2 Sep 30 21:08:17 crc kubenswrapper[4919]: I0930 21:08:17.804438 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-25ld5/must-gather-cg45r"] Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.278978 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25ld5_must-gather-cg45r_637f326d-ce6d-43e0-a286-3619fa7bda84/copy/0.log" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.279015 4919 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-25ld5_must-gather-cg45r_637f326d-ce6d-43e0-a286-3619fa7bda84/copy/0.log" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.279697 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.279790 4919 generic.go:334] "Generic (PLEG): container finished" podID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerID="7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192" exitCode=143 Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.279843 4919 scope.go:117] "RemoveContainer" containerID="7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.296719 4919 scope.go:117] "RemoveContainer" containerID="2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.301874 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/637f326d-ce6d-43e0-a286-3619fa7bda84-must-gather-output\") pod \"637f326d-ce6d-43e0-a286-3619fa7bda84\" (UID: \"637f326d-ce6d-43e0-a286-3619fa7bda84\") " Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.301923 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrv4g\" (UniqueName: \"kubernetes.io/projected/637f326d-ce6d-43e0-a286-3619fa7bda84-kube-api-access-mrv4g\") pod \"637f326d-ce6d-43e0-a286-3619fa7bda84\" (UID: \"637f326d-ce6d-43e0-a286-3619fa7bda84\") " Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.308617 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/637f326d-ce6d-43e0-a286-3619fa7bda84-kube-api-access-mrv4g" (OuterVolumeSpecName: "kube-api-access-mrv4g") pod "637f326d-ce6d-43e0-a286-3619fa7bda84" (UID: "637f326d-ce6d-43e0-a286-3619fa7bda84"). InnerVolumeSpecName "kube-api-access-mrv4g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.393335 4919 scope.go:117] "RemoveContainer" containerID="7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192" Sep 30 21:08:18 crc kubenswrapper[4919]: E0930 21:08:18.393901 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192\": container with ID starting with 7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192 not found: ID does not exist" containerID="7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.394028 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192"} err="failed to get container status \"7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192\": rpc error: code = NotFound desc = could not find container \"7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192\": container with ID starting with 7b778bec46597d05b7792806bcfe0d94bcdfb34c71cfce5c9556d2df1fd3e192 not found: ID does not exist" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.394136 4919 scope.go:117] "RemoveContainer" containerID="2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e" Sep 30 21:08:18 crc kubenswrapper[4919]: E0930 21:08:18.395838 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e\": container with ID starting with 2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e not found: ID does not exist" containerID="2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.395883 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e"} err="failed to get container status \"2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e\": rpc error: code = NotFound desc = could not find container \"2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e\": container with ID starting with 2ffdbc531eac2edf4425670548b36761a61b3b2917ef34b6c0e153a32ab3849e not found: ID does not exist" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.404732 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrv4g\" (UniqueName: \"kubernetes.io/projected/637f326d-ce6d-43e0-a286-3619fa7bda84-kube-api-access-mrv4g\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.469039 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/637f326d-ce6d-43e0-a286-3619fa7bda84-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "637f326d-ce6d-43e0-a286-3619fa7bda84" (UID: "637f326d-ce6d-43e0-a286-3619fa7bda84"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:08:18 crc kubenswrapper[4919]: I0930 21:08:18.506591 4919 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/637f326d-ce6d-43e0-a286-3619fa7bda84-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:19 crc kubenswrapper[4919]: I0930 21:08:19.290859 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-25ld5/must-gather-cg45r" Sep 30 21:08:19 crc kubenswrapper[4919]: I0930 21:08:19.296889 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:19 crc kubenswrapper[4919]: I0930 21:08:19.296923 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:19 crc kubenswrapper[4919]: I0930 21:08:19.349808 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:19 crc kubenswrapper[4919]: I0930 21:08:19.644745 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="637f326d-ce6d-43e0-a286-3619fa7bda84" path="/var/lib/kubelet/pods/637f326d-ce6d-43e0-a286-3619fa7bda84/volumes" Sep 30 21:08:20 crc kubenswrapper[4919]: I0930 21:08:20.376597 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:20 crc kubenswrapper[4919]: I0930 21:08:20.427730 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtblb"] Sep 30 21:08:21 crc kubenswrapper[4919]: I0930 21:08:21.635130 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:08:21 crc kubenswrapper[4919]: E0930 21:08:21.635649 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:08:22 crc kubenswrapper[4919]: I0930 21:08:22.321484 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gtblb" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerName="registry-server" containerID="cri-o://34b842d24710e74fce580177e006398929917a464ab4a9de3c6a5961ec0c88db" gracePeriod=2 Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.337493 4919 generic.go:334] "Generic (PLEG): container finished" podID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerID="34b842d24710e74fce580177e006398929917a464ab4a9de3c6a5961ec0c88db" exitCode=0 Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.337588 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtblb" event={"ID":"b055ec16-dd43-4627-96e1-13f04f5bcd88","Type":"ContainerDied","Data":"34b842d24710e74fce580177e006398929917a464ab4a9de3c6a5961ec0c88db"} Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.337863 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtblb" 
event={"ID":"b055ec16-dd43-4627-96e1-13f04f5bcd88","Type":"ContainerDied","Data":"388c3e5fa66a9ba6f56c56ec04d9f125ec2a836b1fdb9831ac2574de0b60cdde"} Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.337883 4919 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="388c3e5fa66a9ba6f56c56ec04d9f125ec2a836b1fdb9831ac2574de0b60cdde" Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.401754 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.511733 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-utilities\") pod \"b055ec16-dd43-4627-96e1-13f04f5bcd88\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.511883 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-catalog-content\") pod \"b055ec16-dd43-4627-96e1-13f04f5bcd88\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.511905 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4bn8\" (UniqueName: \"kubernetes.io/projected/b055ec16-dd43-4627-96e1-13f04f5bcd88-kube-api-access-h4bn8\") pod \"b055ec16-dd43-4627-96e1-13f04f5bcd88\" (UID: \"b055ec16-dd43-4627-96e1-13f04f5bcd88\") " Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.513659 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-utilities" (OuterVolumeSpecName: "utilities") pod "b055ec16-dd43-4627-96e1-13f04f5bcd88" (UID: "b055ec16-dd43-4627-96e1-13f04f5bcd88"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.518524 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b055ec16-dd43-4627-96e1-13f04f5bcd88-kube-api-access-h4bn8" (OuterVolumeSpecName: "kube-api-access-h4bn8") pod "b055ec16-dd43-4627-96e1-13f04f5bcd88" (UID: "b055ec16-dd43-4627-96e1-13f04f5bcd88"). InnerVolumeSpecName "kube-api-access-h4bn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.614557 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:23 crc kubenswrapper[4919]: I0930 21:08:23.614592 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4bn8\" (UniqueName: \"kubernetes.io/projected/b055ec16-dd43-4627-96e1-13f04f5bcd88-kube-api-access-h4bn8\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:24 crc kubenswrapper[4919]: I0930 21:08:24.313771 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b055ec16-dd43-4627-96e1-13f04f5bcd88" (UID: "b055ec16-dd43-4627-96e1-13f04f5bcd88"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:08:24 crc kubenswrapper[4919]: I0930 21:08:24.329037 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b055ec16-dd43-4627-96e1-13f04f5bcd88-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:08:24 crc kubenswrapper[4919]: I0930 21:08:24.349792 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtblb" Sep 30 21:08:24 crc kubenswrapper[4919]: I0930 21:08:24.394740 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtblb"] Sep 30 21:08:24 crc kubenswrapper[4919]: I0930 21:08:24.410065 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gtblb"] Sep 30 21:08:25 crc kubenswrapper[4919]: I0930 21:08:25.659792 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" path="/var/lib/kubelet/pods/b055ec16-dd43-4627-96e1-13f04f5bcd88/volumes" Sep 30 21:08:27 crc kubenswrapper[4919]: I0930 21:08:27.632577 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:08:27 crc kubenswrapper[4919]: E0930 21:08:27.634185 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:08:29 crc kubenswrapper[4919]: I0930 21:08:29.633253 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:08:29 crc kubenswrapper[4919]: E0930 21:08:29.634206 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:08:32 crc kubenswrapper[4919]: I0930 21:08:32.632766 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:08:32 crc kubenswrapper[4919]: E0930 21:08:32.633645 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 2m40s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:08:40 crc kubenswrapper[4919]: I0930 21:08:40.633665 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:08:40 crc kubenswrapper[4919]: E0930 21:08:40.634912 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" 
pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:08:41 crc kubenswrapper[4919]: I0930 21:08:41.754743 4919 scope.go:117] "RemoveContainer" containerID="bb6f4e140344374c5dec36c318fbfbe7fd58f4dd9c2623937391563f14bdc2dd" Sep 30 21:08:43 crc kubenswrapper[4919]: I0930 21:08:43.633476 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:08:43 crc kubenswrapper[4919]: I0930 21:08:43.634580 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:08:43 crc kubenswrapper[4919]: E0930 21:08:43.634883 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:08:44 crc kubenswrapper[4919]: I0930 21:08:44.574736 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerStarted","Data":"5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7"} Sep 30 21:08:47 crc kubenswrapper[4919]: I0930 21:08:47.339479 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:08:47 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:08:47 crc kubenswrapper[4919]: > Sep 30 21:08:50 crc kubenswrapper[4919]: I0930 21:08:50.381639 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:08:50 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:08:50 crc kubenswrapper[4919]: > Sep 30 21:08:53 crc kubenswrapper[4919]: I0930 21:08:53.333494 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" probeResult="failure" output=< Sep 30 21:08:53 crc kubenswrapper[4919]: Process cloudkitty-proc not found Sep 30 21:08:53 crc kubenswrapper[4919]: > Sep 30 21:08:53 crc kubenswrapper[4919]: I0930 21:08:53.333841 4919 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/cloudkitty-proc-0" Sep 30 21:08:53 crc kubenswrapper[4919]: I0930 21:08:53.334609 4919 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cloudkitty-proc" containerStatusID={"Type":"cri-o","ID":"5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7"} pod="openstack/cloudkitty-proc-0" containerMessage="Container cloudkitty-proc failed liveness probe, will be restarted" Sep 30 21:08:53 crc kubenswrapper[4919]: I0930 21:08:53.334638 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" containerName="cloudkitty-proc" containerID="cri-o://5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" gracePeriod=30 Sep 30 21:08:53 crc kubenswrapper[4919]: I0930 21:08:53.633559 4919 scope.go:117] "RemoveContainer" 
containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:08:53 crc kubenswrapper[4919]: E0930 21:08:53.633978 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:08:53 crc kubenswrapper[4919]: E0930 21:08:53.759774 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:08:53 crc kubenswrapper[4919]: E0930 21:08:53.870531 4919 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod983be098_678e_4ecb_a684_7874ae171f14.slice/crio-5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod983be098_678e_4ecb_a684_7874ae171f14.slice/crio-conmon-5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7.scope\": RecentStats: unable to find data in memory cache]" Sep 30 21:08:54 crc kubenswrapper[4919]: I0930 21:08:54.722363 4919 generic.go:334] "Generic (PLEG): container finished" podID="983be098-678e-4ecb-a684-7874ae171f14" containerID="5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" exitCode=0 Sep 30 21:08:54 crc kubenswrapper[4919]: I0930 21:08:54.723291 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"983be098-678e-4ecb-a684-7874ae171f14","Type":"ContainerDied","Data":"5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7"} Sep 30 21:08:54 crc kubenswrapper[4919]: I0930 21:08:54.723442 4919 scope.go:117] "RemoveContainer" containerID="f268c4c5d479b19c59135813c9f7e1a5a6f9ef2f94cc19b54ccd672b5025e021" Sep 30 21:08:54 crc kubenswrapper[4919]: I0930 21:08:54.724302 4919 scope.go:117] "RemoveContainer" containerID="5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" Sep 30 21:08:54 crc kubenswrapper[4919]: E0930 21:08:54.725055 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:08:56 crc kubenswrapper[4919]: I0930 21:08:56.632616 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:08:56 crc kubenswrapper[4919]: E0930 21:08:56.633647 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" 
podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:09:05 crc kubenswrapper[4919]: I0930 21:09:05.633365 4919 scope.go:117] "RemoveContainer" containerID="5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" Sep 30 21:09:05 crc kubenswrapper[4919]: E0930 21:09:05.634655 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:09:06 crc kubenswrapper[4919]: I0930 21:09:06.632918 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:09:06 crc kubenswrapper[4919]: E0930 21:09:06.633424 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:09:08 crc kubenswrapper[4919]: I0930 21:09:08.633376 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:09:08 crc kubenswrapper[4919]: E0930 21:09:08.634386 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:09:17 crc kubenswrapper[4919]: I0930 21:09:17.638433 4919 scope.go:117] "RemoveContainer" containerID="5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" Sep 30 21:09:17 crc kubenswrapper[4919]: E0930 21:09:17.641940 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:09:20 crc kubenswrapper[4919]: I0930 21:09:20.633640 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:09:20 crc kubenswrapper[4919]: E0930 21:09:20.634366 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:09:22 crc kubenswrapper[4919]: I0930 21:09:22.632813 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:09:22 crc kubenswrapper[4919]: E0930 21:09:22.633449 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway 
pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:09:32 crc kubenswrapper[4919]: I0930 21:09:32.632914 4919 scope.go:117] "RemoveContainer" containerID="5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" Sep 30 21:09:32 crc kubenswrapper[4919]: E0930 21:09:32.634057 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:09:34 crc kubenswrapper[4919]: I0930 21:09:34.631987 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:09:34 crc kubenswrapper[4919]: E0930 21:09:34.632881 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:09:36 crc kubenswrapper[4919]: I0930 21:09:36.632132 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:09:36 crc kubenswrapper[4919]: E0930 21:09:36.633332 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:09:44 crc kubenswrapper[4919]: I0930 21:09:44.632348 4919 scope.go:117] "RemoveContainer" containerID="5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" Sep 30 21:09:44 crc kubenswrapper[4919]: E0930 21:09:44.634179 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:09:45 crc kubenswrapper[4919]: I0930 21:09:45.645810 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:09:45 crc kubenswrapper[4919]: E0930 21:09:45.646428 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.192513 4919 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vrf7q"] Sep 30 21:09:46 crc kubenswrapper[4919]: E0930 21:09:46.194629 4919 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerName="copy" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.194658 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerName="copy" Sep 30 21:09:46 crc kubenswrapper[4919]: E0930 21:09:46.194695 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerName="registry-server" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.194707 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerName="registry-server" Sep 30 21:09:46 crc kubenswrapper[4919]: E0930 21:09:46.194773 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerName="extract-utilities" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.194785 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerName="extract-utilities" Sep 30 21:09:46 crc kubenswrapper[4919]: E0930 21:09:46.194816 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerName="gather" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.194830 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerName="gather" Sep 30 21:09:46 crc kubenswrapper[4919]: E0930 21:09:46.194843 4919 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerName="extract-content" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.194851 4919 state_mem.go:107] "Deleted CPUSet assignment" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerName="extract-content" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.195427 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerName="gather" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.195480 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="637f326d-ce6d-43e0-a286-3619fa7bda84" containerName="copy" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.195511 4919 memory_manager.go:354] "RemoveStaleState removing state" podUID="b055ec16-dd43-4627-96e1-13f04f5bcd88" containerName="registry-server" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.212420 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vrf7q"] Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.212901 4919 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.367586 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-utilities\") pod \"redhat-operators-vrf7q\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.367902 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn2j7\" (UniqueName: \"kubernetes.io/projected/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-kube-api-access-nn2j7\") pod \"redhat-operators-vrf7q\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.367971 4919 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-catalog-content\") pod \"redhat-operators-vrf7q\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.469736 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-utilities\") pod \"redhat-operators-vrf7q\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.470066 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn2j7\" (UniqueName: \"kubernetes.io/projected/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-kube-api-access-nn2j7\") pod \"redhat-operators-vrf7q\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.470236 4919 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-catalog-content\") pod \"redhat-operators-vrf7q\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.470589 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-catalog-content\") pod \"redhat-operators-vrf7q\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.471276 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-utilities\") pod \"redhat-operators-vrf7q\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.491279 4919 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn2j7\" (UniqueName: \"kubernetes.io/projected/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-kube-api-access-nn2j7\") pod \"redhat-operators-vrf7q\" (UID: 
\"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:46 crc kubenswrapper[4919]: I0930 21:09:46.535932 4919 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:47 crc kubenswrapper[4919]: W0930 21:09:47.017135 4919 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b41e63f_5eab_4b5f_afdc_b28569ff0e65.slice/crio-9a79186f738a1a7b3bf1857ebe67c833bc1c02274208dd3be078696adee99c59 WatchSource:0}: Error finding container 9a79186f738a1a7b3bf1857ebe67c833bc1c02274208dd3be078696adee99c59: Status 404 returned error can't find the container with id 9a79186f738a1a7b3bf1857ebe67c833bc1c02274208dd3be078696adee99c59 Sep 30 21:09:47 crc kubenswrapper[4919]: I0930 21:09:47.021126 4919 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vrf7q"] Sep 30 21:09:47 crc kubenswrapper[4919]: I0930 21:09:47.398369 4919 generic.go:334] "Generic (PLEG): container finished" podID="5b41e63f-5eab-4b5f-afdc-b28569ff0e65" containerID="ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600" exitCode=0 Sep 30 21:09:47 crc kubenswrapper[4919]: I0930 21:09:47.398439 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrf7q" event={"ID":"5b41e63f-5eab-4b5f-afdc-b28569ff0e65","Type":"ContainerDied","Data":"ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600"} Sep 30 21:09:47 crc kubenswrapper[4919]: I0930 21:09:47.398697 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrf7q" event={"ID":"5b41e63f-5eab-4b5f-afdc-b28569ff0e65","Type":"ContainerStarted","Data":"9a79186f738a1a7b3bf1857ebe67c833bc1c02274208dd3be078696adee99c59"} Sep 30 21:09:49 crc kubenswrapper[4919]: I0930 21:09:49.425071 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrf7q" event={"ID":"5b41e63f-5eab-4b5f-afdc-b28569ff0e65","Type":"ContainerStarted","Data":"41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336"} Sep 30 21:09:49 crc kubenswrapper[4919]: I0930 21:09:49.632600 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:09:49 crc kubenswrapper[4919]: E0930 21:09:49.632818 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:09:51 crc kubenswrapper[4919]: I0930 21:09:51.445998 4919 generic.go:334] "Generic (PLEG): container finished" podID="5b41e63f-5eab-4b5f-afdc-b28569ff0e65" containerID="41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336" exitCode=0 Sep 30 21:09:51 crc kubenswrapper[4919]: I0930 21:09:51.446078 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrf7q" event={"ID":"5b41e63f-5eab-4b5f-afdc-b28569ff0e65","Type":"ContainerDied","Data":"41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336"} Sep 30 21:09:52 crc kubenswrapper[4919]: I0930 21:09:52.457298 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-vrf7q" event={"ID":"5b41e63f-5eab-4b5f-afdc-b28569ff0e65","Type":"ContainerStarted","Data":"0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65"} Sep 30 21:09:52 crc kubenswrapper[4919]: I0930 21:09:52.481530 4919 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vrf7q" podStartSLOduration=2.041217729 podStartE2EDuration="6.481510021s" podCreationTimestamp="2025-09-30 21:09:46 +0000 UTC" firstStartedPulling="2025-09-30 21:09:47.400965718 +0000 UTC m=+3372.516998855" lastFinishedPulling="2025-09-30 21:09:51.84125802 +0000 UTC m=+3376.957291147" observedRunningTime="2025-09-30 21:09:52.475122836 +0000 UTC m=+3377.591155963" watchObservedRunningTime="2025-09-30 21:09:52.481510021 +0000 UTC m=+3377.597543148" Sep 30 21:09:56 crc kubenswrapper[4919]: I0930 21:09:56.062459 4919 patch_prober.go:28] interesting pod/machine-config-daemon-p4zv6 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 30 21:09:56 crc kubenswrapper[4919]: I0930 21:09:56.063129 4919 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-p4zv6" podUID="eb371a63-6d82-453e-930e-656710b97f10" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 30 21:09:56 crc kubenswrapper[4919]: I0930 21:09:56.536265 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:56 crc kubenswrapper[4919]: I0930 21:09:56.536313 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:56 crc kubenswrapper[4919]: I0930 21:09:56.605843 4919 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:56 crc kubenswrapper[4919]: I0930 21:09:56.632633 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:09:56 crc kubenswrapper[4919]: E0930 21:09:56.632874 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:09:57 crc kubenswrapper[4919]: I0930 21:09:57.567278 4919 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:09:57 crc kubenswrapper[4919]: I0930 21:09:57.626996 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vrf7q"] Sep 30 21:09:57 crc kubenswrapper[4919]: I0930 21:09:57.632623 4919 scope.go:117] "RemoveContainer" containerID="5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" Sep 30 21:09:57 crc kubenswrapper[4919]: E0930 21:09:57.632918 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:09:59 crc kubenswrapper[4919]: I0930 21:09:59.540073 4919 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vrf7q" podUID="5b41e63f-5eab-4b5f-afdc-b28569ff0e65" containerName="registry-server" containerID="cri-o://0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65" gracePeriod=2 Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.048202 4919 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.175918 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-catalog-content\") pod \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.176080 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nn2j7\" (UniqueName: \"kubernetes.io/projected/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-kube-api-access-nn2j7\") pod \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.176283 4919 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-utilities\") pod \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\" (UID: \"5b41e63f-5eab-4b5f-afdc-b28569ff0e65\") " Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.176985 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-utilities" (OuterVolumeSpecName: "utilities") pod "5b41e63f-5eab-4b5f-afdc-b28569ff0e65" (UID: "5b41e63f-5eab-4b5f-afdc-b28569ff0e65"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.181941 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-kube-api-access-nn2j7" (OuterVolumeSpecName: "kube-api-access-nn2j7") pod "5b41e63f-5eab-4b5f-afdc-b28569ff0e65" (UID: "5b41e63f-5eab-4b5f-afdc-b28569ff0e65"). InnerVolumeSpecName "kube-api-access-nn2j7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.278916 4919 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nn2j7\" (UniqueName: \"kubernetes.io/projected/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-kube-api-access-nn2j7\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.278943 4919 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-utilities\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.282369 4919 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5b41e63f-5eab-4b5f-afdc-b28569ff0e65" (UID: "5b41e63f-5eab-4b5f-afdc-b28569ff0e65"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.380371 4919 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b41e63f-5eab-4b5f-afdc-b28569ff0e65-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.554749 4919 generic.go:334] "Generic (PLEG): container finished" podID="5b41e63f-5eab-4b5f-afdc-b28569ff0e65" containerID="0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65" exitCode=0 Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.554839 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrf7q" event={"ID":"5b41e63f-5eab-4b5f-afdc-b28569ff0e65","Type":"ContainerDied","Data":"0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65"} Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.554899 4919 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vrf7q" event={"ID":"5b41e63f-5eab-4b5f-afdc-b28569ff0e65","Type":"ContainerDied","Data":"9a79186f738a1a7b3bf1857ebe67c833bc1c02274208dd3be078696adee99c59"} Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.554932 4919 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vrf7q" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.554942 4919 scope.go:117] "RemoveContainer" containerID="0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.582766 4919 scope.go:117] "RemoveContainer" containerID="41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.618414 4919 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vrf7q"] Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.628882 4919 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vrf7q"] Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.633130 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:10:00 crc kubenswrapper[4919]: E0930 21:10:00.633503 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.635952 4919 scope.go:117] "RemoveContainer" containerID="ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.677164 4919 scope.go:117] "RemoveContainer" containerID="0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65" Sep 30 21:10:00 crc kubenswrapper[4919]: E0930 21:10:00.678297 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65\": container with ID starting with 0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65 not found: ID does not exist" containerID="0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.678376 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65"} err="failed to get container status \"0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65\": rpc error: code = NotFound desc = could not find container \"0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65\": container with ID starting with 0ac7bd87f25e67a2f9bb2c787dd8993f04c01875083d0968cc9ce4569f19ff65 not found: ID does not exist" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.678409 4919 scope.go:117] "RemoveContainer" containerID="41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336" Sep 30 21:10:00 crc kubenswrapper[4919]: E0930 21:10:00.678936 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336\": container with ID starting with 41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336 not found: ID does not exist" containerID="41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.678990 4919 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336"} err="failed to get container status \"41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336\": rpc error: code = NotFound desc = could not find container \"41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336\": container with ID starting with 41226cdd2cc4264d2f0818db27e258e29b19a3eb9893ab4e90ea94a20062e336 not found: ID does not exist" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.679022 4919 scope.go:117] "RemoveContainer" containerID="ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600" Sep 30 21:10:00 crc kubenswrapper[4919]: E0930 21:10:00.679348 4919 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600\": container with ID starting with ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600 not found: ID does not exist" containerID="ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600" Sep 30 21:10:00 crc kubenswrapper[4919]: I0930 21:10:00.679374 4919 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600"} err="failed to get container status \"ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600\": rpc error: code = NotFound desc = could not find container \"ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600\": container with ID starting with ab4095bff410ada9e3c3b513d0227ae8238f5b39bd8338ecf30ee3b045ff4600 not found: ID does not exist" Sep 30 21:10:01 crc kubenswrapper[4919]: I0930 21:10:01.644109 4919 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b41e63f-5eab-4b5f-afdc-b28569ff0e65" path="/var/lib/kubelet/pods/5b41e63f-5eab-4b5f-afdc-b28569ff0e65/volumes" Sep 30 21:10:09 crc kubenswrapper[4919]: I0930 21:10:09.634028 4919 scope.go:117] "RemoveContainer" containerID="5dd38c43044b7b613eeb7ea2f51a81e0953afe80b50ea2050932404a460ee9e7" Sep 30 21:10:09 crc kubenswrapper[4919]: E0930 21:10:09.635190 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-proc\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=cloudkitty-proc pod=cloudkitty-proc-0_openstack(983be098-678e-4ecb-a684-7874ae171f14)\"" pod="openstack/cloudkitty-proc-0" podUID="983be098-678e-4ecb-a684-7874ae171f14" Sep 30 21:10:10 crc kubenswrapper[4919]: I0930 21:10:10.631908 4919 scope.go:117] "RemoveContainer" containerID="14dd0129d4b055b5427759c3b9603eb17001910a5d0175778ed426c9798997ae" Sep 30 21:10:10 crc kubenswrapper[4919]: E0930 21:10:10.632379 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-bs96r_openstack(0dde18df-d1bd-4b36-82af-cd0967cd942b)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-bs96r" podUID="0dde18df-d1bd-4b36-82af-cd0967cd942b" Sep 30 21:10:12 crc kubenswrapper[4919]: I0930 21:10:12.632430 4919 scope.go:117] "RemoveContainer" containerID="cc1cc10e4aa90a2823d73abe2144c054f3c2537ce94afcc21c6f350a30b609ac" Sep 30 21:10:12 crc kubenswrapper[4919]: E0930 21:10:12.632982 4919 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"gateway\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=gateway pod=cloudkitty-lokistack-gateway-89dc74b89-6jf89_openstack(53a8fa4c-0ba7-4e41-86e5-e4e767126bc3)\"" pod="openstack/cloudkitty-lokistack-gateway-89dc74b89-6jf89" podUID="53a8fa4c-0ba7-4e41-86e5-e4e767126bc3" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067043553024455 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067043553017372 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067034411016506 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067034412015457 5ustar corecore